debugwhyunstable: add support for revsets...
Martin von Zweigbergk
r37414:9966f44e default
@@ -1,3079 +1,3079 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import tempfile
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    dagparser,
    dagutil,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    fileset,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    smartset,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
)
from .utils import (
    dateutil,
    procutil,
    stringutil,
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)

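# Illustrative example (not part of the original file): one way to read the
# DAG grammar documented in debugbuilddag's docstring. The text below is an
# assumed input, not taken from this changeset.
#
#   $ hg init dagdemo && cd dagdemo
#   $ hg debugbuilddag '+3:base $ +2 /base'
#
# "+3" creates three linear revisions, ":base" tags the last one, "$" resets
# the default parent to null so "+2" starts a new root chain, and "/base"
# merges the preceding node with the revision tagged "base".
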
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

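# Illustrative example (added, hedged): typical invocations of debugbundle;
# the bundle file name is a placeholder.
#
#   $ hg bundle --all all.hg
#   $ hg debugbundle all.hg          # one changeset node per line
#   $ hg debugbundle --all all.hg    # id, p1, p2, cset, delta base, len(delta)
#   $ hg debugbundle --spec all.hg   # print the bundlespec, e.g. "bzip2-v2"
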
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

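# Illustrative example (added, hedged): the peer URL is a placeholder. The
# section headers match the ui.write calls above.
#
#   $ hg debugcapabilities ssh://example.com/repo
#   Main capabilities:
#     ...
#   Bundle2 capabilities:
#     ...
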
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

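# Illustrative example (added, hedged): debugcreatestreamclonebundle writes a
# bundle that debugapplystreamclonebundle (defined earlier) can restore into a
# fresh, typically empty, repository. File and directory names are placeholders.
#
#   $ hg debugcreatestreamclonebundle ../stream.hg
#   $ hg init ../copy && cd ../copy
#   $ hg debugapplystreamclonebundle ../stream.hg
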
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

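# Illustrative example (added, hedged): debugdag emits the same element
# grammar that debugbuilddag consumes, so a repository's DAG can be dumped
# in a form debugbuilddag would accept.
#
#   $ hg debugdag -t          # use tags as labels in the output
#   $ hg debugdag -b --dots   # annotate branches and use dots for runs
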
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

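# Illustrative example (added, hedged): "somefile" and revision 0 are
# placeholders.
#
#   $ hg debugdata -c 0           # raw changelog entry for rev 0
#   $ hg debugdata -m 0           # raw manifest data for rev 0
#   $ hg debugdata somefile 0     # raw filelog data for somefile, rev 0
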
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

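# Illustrative example (added, hedged): the labels match the ui.write calls
# above; the exact "standard" rendering depends on dateutil.datestr's default
# format.
#
#   $ hg debugdate '2018-04-04 12:00 +0000'
#   internal: 1522843200 0
#   standard: Wed Apr 04 12:00:00 2018 +0000
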
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

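# Illustrative example (added, hedged): the docstring keywords above can be
# used with the standard formatter options from cmdutil.formatteropts.
#
#   $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {deltatype}\n'
#   $ hg debugdeltachain -c -T json
#
# With experimental.sparse-read=True the readsize, largestblock, readdensity
# and srchunks keywords are populated as well.
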
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

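# Illustrative example (added, hedged): each entry is printed as
# "<state> <mode> <size> <mtime> <filename>", matching the ui.write call above.
#
#   $ hg debugdirstate --nodates    # stable output (mtimes shown as "set")
#   $ hg debugdirstate --datesort   # sort by saved mtime instead of filename
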
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)

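# Illustrative example (added, hedged): the remote URL is a placeholder.
#
#   $ hg debugdiscovery https://example.com/repo
#   $ hg debugdiscovery --rev . https://example.com/repo    # restrict to '.'
#   $ hg debugdiscovery --old --nonheads https://example.com/repo
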
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
        if isinternal or hgver in exttestedwith:
            fm.plain('\n')
        elif not exttestedwith:
            fm.plain(_(' (untested!)\n'))
        else:
            lasttestedversion = exttestedwith[-1]
            fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)

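# Illustrative example (added, hedged): fileset expressions are evaluated
# against the revision given with -r, or the working directory context
# otherwise; the expressions below are assumed inputs.
#
#   $ hg debugfileset 'added() and glob:**.py'
#   $ hg debugfileset -r '.^' 'size(">10k")'
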
893 @command('debugformat',
893 @command('debugformat',
894 [] + cmdutil.formatteropts,
894 [] + cmdutil.formatteropts,
895 _(''))
895 _(''))
896 def debugformat(ui, repo, **opts):
896 def debugformat(ui, repo, **opts):
897 """display format information about the current repository
897 """display format information about the current repository
898
898
899 Use --verbose to get extra information about the current config value and
899 Use --verbose to get extra information about the current config value and
900 the Mercurial default."""
900 the Mercurial default."""
901 opts = pycompat.byteskwargs(opts)
901 opts = pycompat.byteskwargs(opts)
902 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
902 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
903 maxvariantlength = max(len('format-variant'), maxvariantlength)
903 maxvariantlength = max(len('format-variant'), maxvariantlength)
904
904
905 def makeformatname(name):
905 def makeformatname(name):
906 return '%s:' + (' ' * (maxvariantlength - len(name)))
906 return '%s:' + (' ' * (maxvariantlength - len(name)))
907
907
908 fm = ui.formatter('debugformat', opts)
908 fm = ui.formatter('debugformat', opts)
909 if fm.isplain():
909 if fm.isplain():
910 def formatvalue(value):
910 def formatvalue(value):
911 if util.safehasattr(value, 'startswith'):
911 if util.safehasattr(value, 'startswith'):
912 return value
912 return value
913 if value:
913 if value:
914 return 'yes'
914 return 'yes'
915 else:
915 else:
916 return 'no'
916 return 'no'
917 else:
917 else:
918 formatvalue = pycompat.identity
918 formatvalue = pycompat.identity
919
919
920 fm.plain('format-variant')
920 fm.plain('format-variant')
921 fm.plain(' ' * (maxvariantlength - len('format-variant')))
921 fm.plain(' ' * (maxvariantlength - len('format-variant')))
922 fm.plain(' repo')
922 fm.plain(' repo')
923 if ui.verbose:
923 if ui.verbose:
924 fm.plain(' config default')
924 fm.plain(' config default')
925 fm.plain('\n')
925 fm.plain('\n')
926 for fv in upgrade.allformatvariant:
926 for fv in upgrade.allformatvariant:
927 fm.startitem()
927 fm.startitem()
928 repovalue = fv.fromrepo(repo)
928 repovalue = fv.fromrepo(repo)
929 configvalue = fv.fromconfig(repo)
929 configvalue = fv.fromconfig(repo)
930
930
931 if repovalue != configvalue:
931 if repovalue != configvalue:
932 namelabel = 'formatvariant.name.mismatchconfig'
932 namelabel = 'formatvariant.name.mismatchconfig'
933 repolabel = 'formatvariant.repo.mismatchconfig'
933 repolabel = 'formatvariant.repo.mismatchconfig'
934 elif repovalue != fv.default:
934 elif repovalue != fv.default:
935 namelabel = 'formatvariant.name.mismatchdefault'
935 namelabel = 'formatvariant.name.mismatchdefault'
936 repolabel = 'formatvariant.repo.mismatchdefault'
936 repolabel = 'formatvariant.repo.mismatchdefault'
937 else:
937 else:
938 namelabel = 'formatvariant.name.uptodate'
938 namelabel = 'formatvariant.name.uptodate'
939 repolabel = 'formatvariant.repo.uptodate'
939 repolabel = 'formatvariant.repo.uptodate'
940
940
941 fm.write('name', makeformatname(fv.name), fv.name,
941 fm.write('name', makeformatname(fv.name), fv.name,
942 label=namelabel)
942 label=namelabel)
943 fm.write('repo', ' %3s', formatvalue(repovalue),
943 fm.write('repo', ' %3s', formatvalue(repovalue),
944 label=repolabel)
944 label=repolabel)
945 if fv.default != configvalue:
945 if fv.default != configvalue:
946 configlabel = 'formatvariant.config.special'
946 configlabel = 'formatvariant.config.special'
947 else:
947 else:
948 configlabel = 'formatvariant.config.default'
948 configlabel = 'formatvariant.config.default'
949 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
949 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
950 label=configlabel)
950 label=configlabel)
951 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
951 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
952 label='formatvariant.default')
952 label='formatvariant.default')
953 fm.plain('\n')
953 fm.plain('\n')
954 fm.end()
954 fm.end()
955
955
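# Illustrative plain output (editor's sketch; the variant names and values
# shown are examples and depend on the repository and the Mercurial build):
#
#   $ hg debugformat
#   format-variant repo
#   fncache:        yes
#   dotencode:      yes
#   generaldelta:   yes
#
#   $ hg debugformat --verbose    # adds the "config" and "default" columns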
956 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
956 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
957 def debugfsinfo(ui, path="."):
957 def debugfsinfo(ui, path="."):
958 """show information detected about current filesystem"""
958 """show information detected about current filesystem"""
959 ui.write(('path: %s\n') % path)
959 ui.write(('path: %s\n') % path)
960 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
960 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
961 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
961 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
962 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
962 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
963 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
963 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
964 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
964 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
965 casesensitive = '(unknown)'
965 casesensitive = '(unknown)'
966 try:
966 try:
967 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
967 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
968 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
968 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
969 except OSError:
969 except OSError:
970 pass
970 pass
971 ui.write(('case-sensitive: %s\n') % casesensitive)
971 ui.write(('case-sensitive: %s\n') % casesensitive)
972
972
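# Illustrative output (editor's sketch; actual values depend on the platform
# and the filesystem being inspected):
#
#   $ hg debugfsinfo /tmp
#   path: /tmp
#   mounted on: /
#   exec: yes
#   fstype: ext4
#   symlink: yes
#   hardlink: yes
#   case-sensitive: yes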
973 @command('debuggetbundle',
973 @command('debuggetbundle',
974 [('H', 'head', [], _('id of head node'), _('ID')),
974 [('H', 'head', [], _('id of head node'), _('ID')),
975 ('C', 'common', [], _('id of common node'), _('ID')),
975 ('C', 'common', [], _('id of common node'), _('ID')),
976 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
976 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
977 _('REPO FILE [-H|-C ID]...'),
977 _('REPO FILE [-H|-C ID]...'),
978 norepo=True)
978 norepo=True)
979 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
979 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
980 """retrieves a bundle from a repo
980 """retrieves a bundle from a repo
981
981
982 Every ID must be a full-length hex node id string. Saves the bundle to the
982 Every ID must be a full-length hex node id string. Saves the bundle to the
983 given file.
983 given file.
984 """
984 """
985 opts = pycompat.byteskwargs(opts)
985 opts = pycompat.byteskwargs(opts)
986 repo = hg.peer(ui, opts, repopath)
986 repo = hg.peer(ui, opts, repopath)
987 if not repo.capable('getbundle'):
987 if not repo.capable('getbundle'):
988 raise error.Abort("getbundle() not supported by target repository")
988 raise error.Abort("getbundle() not supported by target repository")
989 args = {}
989 args = {}
990 if common:
990 if common:
991 args[r'common'] = [bin(s) for s in common]
991 args[r'common'] = [bin(s) for s in common]
992 if head:
992 if head:
993 args[r'heads'] = [bin(s) for s in head]
993 args[r'heads'] = [bin(s) for s in head]
994 # TODO: get desired bundlecaps from command line.
994 # TODO: get desired bundlecaps from command line.
995 args[r'bundlecaps'] = None
995 args[r'bundlecaps'] = None
996 bundle = repo.getbundle('debug', **args)
996 bundle = repo.getbundle('debug', **args)
997
997
998 bundletype = opts.get('type', 'bzip2').lower()
998 bundletype = opts.get('type', 'bzip2').lower()
999 btypes = {'none': 'HG10UN',
999 btypes = {'none': 'HG10UN',
1000 'bzip2': 'HG10BZ',
1000 'bzip2': 'HG10BZ',
1001 'gzip': 'HG10GZ',
1001 'gzip': 'HG10GZ',
1002 'bundle2': 'HG20'}
1002 'bundle2': 'HG20'}
1003 bundletype = btypes.get(bundletype)
1003 bundletype = btypes.get(bundletype)
1004 if bundletype not in bundle2.bundletypes:
1004 if bundletype not in bundle2.bundletypes:
1005 raise error.Abort(_('unknown bundle type specified with --type'))
1005 raise error.Abort(_('unknown bundle type specified with --type'))
1006 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1006 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1007
1007
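# Illustrative usage (editor's sketch; the URL and node id are placeholders):
#
#   $ hg debuggetbundle https://example.com/repo out.hg \
#       -H 0123456789012345678901234567890123456789 -t bundle2
#
# -H/--head and -C/--common take full 40-hex node ids and may be repeated;
# -t/--type accepts none, bzip2 (the default), gzip or bundle2, mapped to the
# HG10UN/HG10BZ/HG10GZ/HG20 bundle types above.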
1008 @command('debugignore', [], '[FILE]')
1008 @command('debugignore', [], '[FILE]')
1009 def debugignore(ui, repo, *files, **opts):
1009 def debugignore(ui, repo, *files, **opts):
1010 """display the combined ignore pattern and information about ignored files
1010 """display the combined ignore pattern and information about ignored files
1011
1011
1012 With no argument display the combined ignore pattern.
1012 With no argument display the combined ignore pattern.
1013
1013
1014 Given space-separated file names, shows whether each given file is ignored
1014 Given space-separated file names, shows whether each given file is ignored
1015 and, if so, shows the ignore rule (file and line number) that matched it.
1015 and, if so, shows the ignore rule (file and line number) that matched it.
1016 """
1016 """
1017 ignore = repo.dirstate._ignore
1017 ignore = repo.dirstate._ignore
1018 if not files:
1018 if not files:
1019 # Show all the patterns
1019 # Show all the patterns
1020 ui.write("%s\n" % pycompat.byterepr(ignore))
1020 ui.write("%s\n" % pycompat.byterepr(ignore))
1021 else:
1021 else:
1022 m = scmutil.match(repo[None], pats=files)
1022 m = scmutil.match(repo[None], pats=files)
1023 for f in m.files():
1023 for f in m.files():
1024 nf = util.normpath(f)
1024 nf = util.normpath(f)
1025 ignored = None
1025 ignored = None
1026 ignoredata = None
1026 ignoredata = None
1027 if nf != '.':
1027 if nf != '.':
1028 if ignore(nf):
1028 if ignore(nf):
1029 ignored = nf
1029 ignored = nf
1030 ignoredata = repo.dirstate._ignorefileandline(nf)
1030 ignoredata = repo.dirstate._ignorefileandline(nf)
1031 else:
1031 else:
1032 for p in util.finddirs(nf):
1032 for p in util.finddirs(nf):
1033 if ignore(p):
1033 if ignore(p):
1034 ignored = p
1034 ignored = p
1035 ignoredata = repo.dirstate._ignorefileandline(p)
1035 ignoredata = repo.dirstate._ignorefileandline(p)
1036 break
1036 break
1037 if ignored:
1037 if ignored:
1038 if ignored == nf:
1038 if ignored == nf:
1039 ui.write(_("%s is ignored\n") % m.uipath(f))
1039 ui.write(_("%s is ignored\n") % m.uipath(f))
1040 else:
1040 else:
1041 ui.write(_("%s is ignored because of "
1041 ui.write(_("%s is ignored because of "
1042 "containing folder %s\n")
1042 "containing folder %s\n")
1043 % (m.uipath(f), ignored))
1043 % (m.uipath(f), ignored))
1044 ignorefile, lineno, line = ignoredata
1044 ignorefile, lineno, line = ignoredata
1045 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1045 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1046 % (ignorefile, lineno, line))
1046 % (ignorefile, lineno, line))
1047 else:
1047 else:
1048 ui.write(_("%s is not ignored\n") % m.uipath(f))
1048 ui.write(_("%s is not ignored\n") % m.uipath(f))
1049
1049
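# Illustrative usage (editor's sketch; the file and the rule are placeholders):
#
#   $ hg debugignore
#   <combined ignore matcher repr>
#   $ hg debugignore build/foo.o
#   build/foo.o is ignored
#   (ignore rule in .hgignore, line 3: 'build/')
#
# The "ignored because of containing folder" form is used when a parent
# directory, rather than the file itself, matches an ignore rule.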
1050 @command('debugindex', cmdutil.debugrevlogopts +
1050 @command('debugindex', cmdutil.debugrevlogopts +
1051 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1051 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1052 _('[-f FORMAT] -c|-m|FILE'),
1052 _('[-f FORMAT] -c|-m|FILE'),
1053 optionalrepo=True)
1053 optionalrepo=True)
1054 def debugindex(ui, repo, file_=None, **opts):
1054 def debugindex(ui, repo, file_=None, **opts):
1055 """dump the contents of an index file"""
1055 """dump the contents of an index file"""
1056 opts = pycompat.byteskwargs(opts)
1056 opts = pycompat.byteskwargs(opts)
1057 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1057 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1058 format = opts.get('format', 0)
1058 format = opts.get('format', 0)
1059 if format not in (0, 1):
1059 if format not in (0, 1):
1060 raise error.Abort(_("unknown format %d") % format)
1060 raise error.Abort(_("unknown format %d") % format)
1061
1061
1062 if ui.debugflag:
1062 if ui.debugflag:
1063 shortfn = hex
1063 shortfn = hex
1064 else:
1064 else:
1065 shortfn = short
1065 shortfn = short
1066
1066
1067 # There might not be anything in r, so have a sane default
1067 # There might not be anything in r, so have a sane default
1068 idlen = 12
1068 idlen = 12
1069 for i in r:
1069 for i in r:
1070 idlen = len(shortfn(r.node(i)))
1070 idlen = len(shortfn(r.node(i)))
1071 break
1071 break
1072
1072
1073 if format == 0:
1073 if format == 0:
1074 if ui.verbose:
1074 if ui.verbose:
1075 ui.write((" rev offset length linkrev"
1075 ui.write((" rev offset length linkrev"
1076 " %s %s p2\n") % ("nodeid".ljust(idlen),
1076 " %s %s p2\n") % ("nodeid".ljust(idlen),
1077 "p1".ljust(idlen)))
1077 "p1".ljust(idlen)))
1078 else:
1078 else:
1079 ui.write((" rev linkrev %s %s p2\n") % (
1079 ui.write((" rev linkrev %s %s p2\n") % (
1080 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1080 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1081 elif format == 1:
1081 elif format == 1:
1082 if ui.verbose:
1082 if ui.verbose:
1083 ui.write((" rev flag offset length size link p1"
1083 ui.write((" rev flag offset length size link p1"
1084 " p2 %s\n") % "nodeid".rjust(idlen))
1084 " p2 %s\n") % "nodeid".rjust(idlen))
1085 else:
1085 else:
1086 ui.write((" rev flag size link p1 p2 %s\n") %
1086 ui.write((" rev flag size link p1 p2 %s\n") %
1087 "nodeid".rjust(idlen))
1087 "nodeid".rjust(idlen))
1088
1088
1089 for i in r:
1089 for i in r:
1090 node = r.node(i)
1090 node = r.node(i)
1091 if format == 0:
1091 if format == 0:
1092 try:
1092 try:
1093 pp = r.parents(node)
1093 pp = r.parents(node)
1094 except Exception:
1094 except Exception:
1095 pp = [nullid, nullid]
1095 pp = [nullid, nullid]
1096 if ui.verbose:
1096 if ui.verbose:
1097 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1097 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1098 i, r.start(i), r.length(i), r.linkrev(i),
1098 i, r.start(i), r.length(i), r.linkrev(i),
1099 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1099 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1100 else:
1100 else:
1101 ui.write("% 6d % 7d %s %s %s\n" % (
1101 ui.write("% 6d % 7d %s %s %s\n" % (
1102 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1102 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1103 shortfn(pp[1])))
1103 shortfn(pp[1])))
1104 elif format == 1:
1104 elif format == 1:
1105 pr = r.parentrevs(i)
1105 pr = r.parentrevs(i)
1106 if ui.verbose:
1106 if ui.verbose:
1107 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1107 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1108 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1108 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1109 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1109 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1110 else:
1110 else:
1111 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1111 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1112 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1112 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1113 shortfn(node)))
1113 shortfn(node)))
1114
1114
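# Illustrative usage (editor's sketch; the revlog contents shown are made up):
#
#   $ hg debugindex -c             # changelog index, default format 0
#      rev linkrev nodeid       p1           p2
#        0       0 1234567890ab 000000000000 000000000000
#
#   $ hg debugindex -f 1 -m        # manifest index; format 1 adds the flag
#                                  # and size columns
#
# A plain FILE argument dumps that file's filelog index instead of -c/-m.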
1115 @command('debugindexdot', cmdutil.debugrevlogopts,
1115 @command('debugindexdot', cmdutil.debugrevlogopts,
1116 _('-c|-m|FILE'), optionalrepo=True)
1116 _('-c|-m|FILE'), optionalrepo=True)
1117 def debugindexdot(ui, repo, file_=None, **opts):
1117 def debugindexdot(ui, repo, file_=None, **opts):
1118 """dump an index DAG as a graphviz dot file"""
1118 """dump an index DAG as a graphviz dot file"""
1119 opts = pycompat.byteskwargs(opts)
1119 opts = pycompat.byteskwargs(opts)
1120 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1120 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1121 ui.write(("digraph G {\n"))
1121 ui.write(("digraph G {\n"))
1122 for i in r:
1122 for i in r:
1123 node = r.node(i)
1123 node = r.node(i)
1124 pp = r.parents(node)
1124 pp = r.parents(node)
1125 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1125 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1126 if pp[1] != nullid:
1126 if pp[1] != nullid:
1127 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1127 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1128 ui.write("}\n")
1128 ui.write("}\n")
1129
1129
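# Illustrative usage (editor's sketch): the output is plain graphviz, so it
# can be piped straight into dot, e.g.
#
#   $ hg debugindexdot -c | dot -Tpng > changelog-dag.png
#
# Each "a -> b" edge points from a parent revision to its child, as emitted
# by the ui.write() calls above.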
1130 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1130 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1131 def debuginstall(ui, **opts):
1131 def debuginstall(ui, **opts):
1132 '''test Mercurial installation
1132 '''test Mercurial installation
1133
1133
1134 Returns 0 on success.
1134 Returns 0 on success.
1135 '''
1135 '''
1136 opts = pycompat.byteskwargs(opts)
1136 opts = pycompat.byteskwargs(opts)
1137
1137
1138 def writetemp(contents):
1138 def writetemp(contents):
1139 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1139 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1140 f = os.fdopen(fd, r"wb")
1140 f = os.fdopen(fd, r"wb")
1141 f.write(contents)
1141 f.write(contents)
1142 f.close()
1142 f.close()
1143 return name
1143 return name
1144
1144
1145 problems = 0
1145 problems = 0
1146
1146
1147 fm = ui.formatter('debuginstall', opts)
1147 fm = ui.formatter('debuginstall', opts)
1148 fm.startitem()
1148 fm.startitem()
1149
1149
1150 # encoding
1150 # encoding
1151 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1151 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1152 err = None
1152 err = None
1153 try:
1153 try:
1154 codecs.lookup(pycompat.sysstr(encoding.encoding))
1154 codecs.lookup(pycompat.sysstr(encoding.encoding))
1155 except LookupError as inst:
1155 except LookupError as inst:
1156 err = stringutil.forcebytestr(inst)
1156 err = stringutil.forcebytestr(inst)
1157 problems += 1
1157 problems += 1
1158 fm.condwrite(err, 'encodingerror', _(" %s\n"
1158 fm.condwrite(err, 'encodingerror', _(" %s\n"
1159 " (check that your locale is properly set)\n"), err)
1159 " (check that your locale is properly set)\n"), err)
1160
1160
1161 # Python
1161 # Python
1162 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1162 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1163 pycompat.sysexecutable)
1163 pycompat.sysexecutable)
1164 fm.write('pythonver', _("checking Python version (%s)\n"),
1164 fm.write('pythonver', _("checking Python version (%s)\n"),
1165 ("%d.%d.%d" % sys.version_info[:3]))
1165 ("%d.%d.%d" % sys.version_info[:3]))
1166 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1166 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1167 os.path.dirname(pycompat.fsencode(os.__file__)))
1167 os.path.dirname(pycompat.fsencode(os.__file__)))
1168
1168
1169 security = set(sslutil.supportedprotocols)
1169 security = set(sslutil.supportedprotocols)
1170 if sslutil.hassni:
1170 if sslutil.hassni:
1171 security.add('sni')
1171 security.add('sni')
1172
1172
1173 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1173 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1174 fm.formatlist(sorted(security), name='protocol',
1174 fm.formatlist(sorted(security), name='protocol',
1175 fmt='%s', sep=','))
1175 fmt='%s', sep=','))
1176
1176
1177 # These are warnings, not errors. So don't increment problem count. This
1177 # These are warnings, not errors. So don't increment problem count. This
1178 # may change in the future.
1178 # may change in the future.
1179 if 'tls1.2' not in security:
1179 if 'tls1.2' not in security:
1180 fm.plain(_(' TLS 1.2 not supported by Python install; '
1180 fm.plain(_(' TLS 1.2 not supported by Python install; '
1181 'network connections lack modern security\n'))
1181 'network connections lack modern security\n'))
1182 if 'sni' not in security:
1182 if 'sni' not in security:
1183 fm.plain(_(' SNI not supported by Python install; may have '
1183 fm.plain(_(' SNI not supported by Python install; may have '
1184 'connectivity issues with some servers\n'))
1184 'connectivity issues with some servers\n'))
1185
1185
1186 # TODO print CA cert info
1186 # TODO print CA cert info
1187
1187
1188 # hg version
1188 # hg version
1189 hgver = util.version()
1189 hgver = util.version()
1190 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1190 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1191 hgver.split('+')[0])
1191 hgver.split('+')[0])
1192 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1192 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1193 '+'.join(hgver.split('+')[1:]))
1193 '+'.join(hgver.split('+')[1:]))
1194
1194
1195 # compiled modules
1195 # compiled modules
1196 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1196 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1197 policy.policy)
1197 policy.policy)
1198 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1198 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1199 os.path.dirname(pycompat.fsencode(__file__)))
1199 os.path.dirname(pycompat.fsencode(__file__)))
1200
1200
1201 if policy.policy in ('c', 'allow'):
1201 if policy.policy in ('c', 'allow'):
1202 err = None
1202 err = None
1203 try:
1203 try:
1204 from .cext import (
1204 from .cext import (
1205 base85,
1205 base85,
1206 bdiff,
1206 bdiff,
1207 mpatch,
1207 mpatch,
1208 osutil,
1208 osutil,
1209 )
1209 )
1210 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1210 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1211 except Exception as inst:
1211 except Exception as inst:
1212 err = stringutil.forcebytestr(inst)
1212 err = stringutil.forcebytestr(inst)
1213 problems += 1
1213 problems += 1
1214 fm.condwrite(err, 'extensionserror', " %s\n", err)
1214 fm.condwrite(err, 'extensionserror', " %s\n", err)
1215
1215
1216 compengines = util.compengines._engines.values()
1216 compengines = util.compengines._engines.values()
1217 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1217 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1218 fm.formatlist(sorted(e.name() for e in compengines),
1218 fm.formatlist(sorted(e.name() for e in compengines),
1219 name='compengine', fmt='%s', sep=', '))
1219 name='compengine', fmt='%s', sep=', '))
1220 fm.write('compenginesavail', _('checking available compression engines '
1220 fm.write('compenginesavail', _('checking available compression engines '
1221 '(%s)\n'),
1221 '(%s)\n'),
1222 fm.formatlist(sorted(e.name() for e in compengines
1222 fm.formatlist(sorted(e.name() for e in compengines
1223 if e.available()),
1223 if e.available()),
1224 name='compengine', fmt='%s', sep=', '))
1224 name='compengine', fmt='%s', sep=', '))
1225 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1225 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1226 fm.write('compenginesserver', _('checking available compression engines '
1226 fm.write('compenginesserver', _('checking available compression engines '
1227 'for wire protocol (%s)\n'),
1227 'for wire protocol (%s)\n'),
1228 fm.formatlist([e.name() for e in wirecompengines
1228 fm.formatlist([e.name() for e in wirecompengines
1229 if e.wireprotosupport()],
1229 if e.wireprotosupport()],
1230 name='compengine', fmt='%s', sep=', '))
1230 name='compengine', fmt='%s', sep=', '))
1231 re2 = 'missing'
1231 re2 = 'missing'
1232 if util._re2:
1232 if util._re2:
1233 re2 = 'available'
1233 re2 = 'available'
1234 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1234 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1235 fm.data(re2=bool(util._re2))
1235 fm.data(re2=bool(util._re2))
1236
1236
1237 # templates
1237 # templates
1238 p = templater.templatepaths()
1238 p = templater.templatepaths()
1239 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1239 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1240 fm.condwrite(not p, '', _(" no template directories found\n"))
1240 fm.condwrite(not p, '', _(" no template directories found\n"))
1241 if p:
1241 if p:
1242 m = templater.templatepath("map-cmdline.default")
1242 m = templater.templatepath("map-cmdline.default")
1243 if m:
1243 if m:
1244 # template found, check if it is working
1244 # template found, check if it is working
1245 err = None
1245 err = None
1246 try:
1246 try:
1247 templater.templater.frommapfile(m)
1247 templater.templater.frommapfile(m)
1248 except Exception as inst:
1248 except Exception as inst:
1249 err = stringutil.forcebytestr(inst)
1249 err = stringutil.forcebytestr(inst)
1250 p = None
1250 p = None
1251 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1251 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1252 else:
1252 else:
1253 p = None
1253 p = None
1254 fm.condwrite(p, 'defaulttemplate',
1254 fm.condwrite(p, 'defaulttemplate',
1255 _("checking default template (%s)\n"), m)
1255 _("checking default template (%s)\n"), m)
1256 fm.condwrite(not m, 'defaulttemplatenotfound',
1256 fm.condwrite(not m, 'defaulttemplatenotfound',
1257 _(" template '%s' not found\n"), "default")
1257 _(" template '%s' not found\n"), "default")
1258 if not p:
1258 if not p:
1259 problems += 1
1259 problems += 1
1260 fm.condwrite(not p, '',
1260 fm.condwrite(not p, '',
1261 _(" (templates seem to have been installed incorrectly)\n"))
1261 _(" (templates seem to have been installed incorrectly)\n"))
1262
1262
1263 # editor
1263 # editor
1264 editor = ui.geteditor()
1264 editor = ui.geteditor()
1265 editor = util.expandpath(editor)
1265 editor = util.expandpath(editor)
1266 editorbin = procutil.shellsplit(editor)[0]
1266 editorbin = procutil.shellsplit(editor)[0]
1267 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1267 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1268 cmdpath = procutil.findexe(editorbin)
1268 cmdpath = procutil.findexe(editorbin)
1269 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1269 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1270 _(" No commit editor set and can't find %s in PATH\n"
1270 _(" No commit editor set and can't find %s in PATH\n"
1271 " (specify a commit editor in your configuration"
1271 " (specify a commit editor in your configuration"
1272 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1272 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1273 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1273 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1274 _(" Can't find editor '%s' in PATH\n"
1274 _(" Can't find editor '%s' in PATH\n"
1275 " (specify a commit editor in your configuration"
1275 " (specify a commit editor in your configuration"
1276 " file)\n"), not cmdpath and editorbin)
1276 " file)\n"), not cmdpath and editorbin)
1277 if not cmdpath and editor != 'vi':
1277 if not cmdpath and editor != 'vi':
1278 problems += 1
1278 problems += 1
1279
1279
1280 # check username
1280 # check username
1281 username = None
1281 username = None
1282 err = None
1282 err = None
1283 try:
1283 try:
1284 username = ui.username()
1284 username = ui.username()
1285 except error.Abort as e:
1285 except error.Abort as e:
1286 err = stringutil.forcebytestr(e)
1286 err = stringutil.forcebytestr(e)
1287 problems += 1
1287 problems += 1
1288
1288
1289 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1289 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1290 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1290 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1291 " (specify a username in your configuration file)\n"), err)
1291 " (specify a username in your configuration file)\n"), err)
1292
1292
1293 fm.condwrite(not problems, '',
1293 fm.condwrite(not problems, '',
1294 _("no problems detected\n"))
1294 _("no problems detected\n"))
1295 if not problems:
1295 if not problems:
1296 fm.data(problems=problems)
1296 fm.data(problems=problems)
1297 fm.condwrite(problems, 'problems',
1297 fm.condwrite(problems, 'problems',
1298 _("%d problems detected,"
1298 _("%d problems detected,"
1299 " please check your install!\n"), problems)
1299 " please check your install!\n"), problems)
1300 fm.end()
1300 fm.end()
1301
1301
1302 return problems
1302 return problems
1303
1303
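# Illustrative usage (editor's sketch):
#
#   $ hg debuginstall           # human-readable checklist, one line per check
#   $ hg debuginstall -T json   # same data through the formatter
#
# The exit status is the number of problems found (0 means a healthy
# install), matching the "return problems" above.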
1304 @command('debugknown', [], _('REPO ID...'), norepo=True)
1304 @command('debugknown', [], _('REPO ID...'), norepo=True)
1305 def debugknown(ui, repopath, *ids, **opts):
1305 def debugknown(ui, repopath, *ids, **opts):
1306 """test whether node ids are known to a repo
1306 """test whether node ids are known to a repo
1307
1307
1308 Every ID must be a full-length hex node id string. Returns a list of 0s
1308 Every ID must be a full-length hex node id string. Returns a list of 0s
1309 and 1s indicating unknown/known.
1309 and 1s indicating unknown/known.
1310 """
1310 """
1311 opts = pycompat.byteskwargs(opts)
1311 opts = pycompat.byteskwargs(opts)
1312 repo = hg.peer(ui, opts, repopath)
1312 repo = hg.peer(ui, opts, repopath)
1313 if not repo.capable('known'):
1313 if not repo.capable('known'):
1314 raise error.Abort("known() not supported by target repository")
1314 raise error.Abort("known() not supported by target repository")
1315 flags = repo.known([bin(s) for s in ids])
1315 flags = repo.known([bin(s) for s in ids])
1316 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1316 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1317
1317
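# Illustrative usage (editor's sketch; the URL and node ids are placeholders):
#
#   $ hg debugknown https://example.com/repo \
#       0123456789012345678901234567890123456789 \
#       abcdefabcdefabcdefabcdefabcdefabcdefabcd
#   10
#
# "1" marks ids the remote knows, "0" marks unknown ones, in argument order.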
1318 @command('debuglabelcomplete', [], _('LABEL...'))
1318 @command('debuglabelcomplete', [], _('LABEL...'))
1319 def debuglabelcomplete(ui, repo, *args):
1319 def debuglabelcomplete(ui, repo, *args):
1320 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1320 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1321 debugnamecomplete(ui, repo, *args)
1321 debugnamecomplete(ui, repo, *args)
1322
1322
1323 @command('debuglocks',
1323 @command('debuglocks',
1324 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1324 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1325 ('W', 'force-wlock', None,
1325 ('W', 'force-wlock', None,
1326 _('free the working state lock (DANGEROUS)')),
1326 _('free the working state lock (DANGEROUS)')),
1327 ('s', 'set-lock', None, _('set the store lock until stopped')),
1327 ('s', 'set-lock', None, _('set the store lock until stopped')),
1328 ('S', 'set-wlock', None,
1328 ('S', 'set-wlock', None,
1329 _('set the working state lock until stopped'))],
1329 _('set the working state lock until stopped'))],
1330 _('[OPTION]...'))
1330 _('[OPTION]...'))
1331 def debuglocks(ui, repo, **opts):
1331 def debuglocks(ui, repo, **opts):
1332 """show or modify state of locks
1332 """show or modify state of locks
1333
1333
1334 By default, this command will show which locks are held. This
1334 By default, this command will show which locks are held. This
1335 includes the user and process holding the lock, the amount of time
1335 includes the user and process holding the lock, the amount of time
1336 the lock has been held, and the machine name where the process is
1336 the lock has been held, and the machine name where the process is
1337 running if it's not local.
1337 running if it's not local.
1338
1338
1339 Locks protect the integrity of Mercurial's data, so they should be
1339 Locks protect the integrity of Mercurial's data, so they should be
1340 treated with care. System crashes or other interruptions may cause
1340 treated with care. System crashes or other interruptions may cause
1341 locks to not be properly released, though Mercurial will usually
1341 locks to not be properly released, though Mercurial will usually
1342 detect and remove such stale locks automatically.
1342 detect and remove such stale locks automatically.
1343
1343
1344 However, detecting stale locks may not always be possible (for
1344 However, detecting stale locks may not always be possible (for
1345 instance, on a shared filesystem). Removing locks may also be
1345 instance, on a shared filesystem). Removing locks may also be
1346 blocked by filesystem permissions.
1346 blocked by filesystem permissions.
1347
1347
1348 Setting a lock will prevent other commands from changing the data.
1348 Setting a lock will prevent other commands from changing the data.
1349 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1349 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1350 The set locks are removed when the command exits.
1350 The set locks are removed when the command exits.
1351
1351
1352 Returns 0 if no locks are held.
1352 Returns 0 if no locks are held.
1353
1353
1354 """
1354 """
1355
1355
1356 if opts.get(r'force_lock'):
1356 if opts.get(r'force_lock'):
1357 repo.svfs.unlink('lock')
1357 repo.svfs.unlink('lock')
1358 if opts.get(r'force_wlock'):
1358 if opts.get(r'force_wlock'):
1359 repo.vfs.unlink('wlock')
1359 repo.vfs.unlink('wlock')
1360 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1360 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1361 return 0
1361 return 0
1362
1362
1363 locks = []
1363 locks = []
1364 try:
1364 try:
1365 if opts.get(r'set_wlock'):
1365 if opts.get(r'set_wlock'):
1366 try:
1366 try:
1367 locks.append(repo.wlock(False))
1367 locks.append(repo.wlock(False))
1368 except error.LockHeld:
1368 except error.LockHeld:
1369 raise error.Abort(_('wlock is already held'))
1369 raise error.Abort(_('wlock is already held'))
1370 if opts.get(r'set_lock'):
1370 if opts.get(r'set_lock'):
1371 try:
1371 try:
1372 locks.append(repo.lock(False))
1372 locks.append(repo.lock(False))
1373 except error.LockHeld:
1373 except error.LockHeld:
1374 raise error.Abort(_('lock is already held'))
1374 raise error.Abort(_('lock is already held'))
1375 if len(locks):
1375 if len(locks):
1376 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1376 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1377 return 0
1377 return 0
1378 finally:
1378 finally:
1379 release(*locks)
1379 release(*locks)
1380
1380
1381 now = time.time()
1381 now = time.time()
1382 held = 0
1382 held = 0
1383
1383
1384 def report(vfs, name, method):
1384 def report(vfs, name, method):
1385 # this causes stale locks to get reaped for more accurate reporting
1385 # this causes stale locks to get reaped for more accurate reporting
1386 try:
1386 try:
1387 l = method(False)
1387 l = method(False)
1388 except error.LockHeld:
1388 except error.LockHeld:
1389 l = None
1389 l = None
1390
1390
1391 if l:
1391 if l:
1392 l.release()
1392 l.release()
1393 else:
1393 else:
1394 try:
1394 try:
1395 st = vfs.lstat(name)
1395 st = vfs.lstat(name)
1396 age = now - st[stat.ST_MTIME]
1396 age = now - st[stat.ST_MTIME]
1397 user = util.username(st.st_uid)
1397 user = util.username(st.st_uid)
1398 locker = vfs.readlock(name)
1398 locker = vfs.readlock(name)
1399 if ":" in locker:
1399 if ":" in locker:
1400 host, pid = locker.split(':')
1400 host, pid = locker.split(':')
1401 if host == socket.gethostname():
1401 if host == socket.gethostname():
1402 locker = 'user %s, process %s' % (user, pid)
1402 locker = 'user %s, process %s' % (user, pid)
1403 else:
1403 else:
1404 locker = 'user %s, process %s, host %s' \
1404 locker = 'user %s, process %s, host %s' \
1405 % (user, pid, host)
1405 % (user, pid, host)
1406 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1406 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1407 return 1
1407 return 1
1408 except OSError as e:
1408 except OSError as e:
1409 if e.errno != errno.ENOENT:
1409 if e.errno != errno.ENOENT:
1410 raise
1410 raise
1411
1411
1412 ui.write(("%-6s free\n") % (name + ":"))
1412 ui.write(("%-6s free\n") % (name + ":"))
1413 return 0
1413 return 0
1414
1414
1415 held += report(repo.svfs, "lock", repo.lock)
1415 held += report(repo.svfs, "lock", repo.lock)
1416 held += report(repo.vfs, "wlock", repo.wlock)
1416 held += report(repo.vfs, "wlock", repo.wlock)
1417
1417
1418 return held
1418 return held
1419
1419
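# Illustrative usage (editor's sketch; the user, pid and age are made up):
#
#   $ hg debuglocks
#   lock:  free
#   wlock: user alice, process 1234 (12s)
#
#   $ hg debuglocks --set-wlock   # holds wlock until the prompt is answered
#   $ hg debuglocks -W            # DANGEROUS: force-free a stale wlock
#
# The exit status is the number of locks currently held.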
1420 @command('debugmergestate', [], '')
1420 @command('debugmergestate', [], '')
1421 def debugmergestate(ui, repo, *args):
1421 def debugmergestate(ui, repo, *args):
1422 """print merge state
1422 """print merge state
1423
1423
1424 Use --verbose to print out information about whether v1 or v2 merge state
1424 Use --verbose to print out information about whether v1 or v2 merge state
1425 was chosen."""
1425 was chosen."""
1426 def _hashornull(h):
1426 def _hashornull(h):
1427 if h == nullhex:
1427 if h == nullhex:
1428 return 'null'
1428 return 'null'
1429 else:
1429 else:
1430 return h
1430 return h
1431
1431
1432 def printrecords(version):
1432 def printrecords(version):
1433 ui.write(('* version %d records\n') % version)
1433 ui.write(('* version %d records\n') % version)
1434 if version == 1:
1434 if version == 1:
1435 records = v1records
1435 records = v1records
1436 else:
1436 else:
1437 records = v2records
1437 records = v2records
1438
1438
1439 for rtype, record in records:
1439 for rtype, record in records:
1440 # pretty print some record types
1440 # pretty print some record types
1441 if rtype == 'L':
1441 if rtype == 'L':
1442 ui.write(('local: %s\n') % record)
1442 ui.write(('local: %s\n') % record)
1443 elif rtype == 'O':
1443 elif rtype == 'O':
1444 ui.write(('other: %s\n') % record)
1444 ui.write(('other: %s\n') % record)
1445 elif rtype == 'm':
1445 elif rtype == 'm':
1446 driver, mdstate = record.split('\0', 1)
1446 driver, mdstate = record.split('\0', 1)
1447 ui.write(('merge driver: %s (state "%s")\n')
1447 ui.write(('merge driver: %s (state "%s")\n')
1448 % (driver, mdstate))
1448 % (driver, mdstate))
1449 elif rtype in 'FDC':
1449 elif rtype in 'FDC':
1450 r = record.split('\0')
1450 r = record.split('\0')
1451 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1451 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1452 if version == 1:
1452 if version == 1:
1453 onode = 'not stored in v1 format'
1453 onode = 'not stored in v1 format'
1454 flags = r[7]
1454 flags = r[7]
1455 else:
1455 else:
1456 onode, flags = r[7:9]
1456 onode, flags = r[7:9]
1457 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1457 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1458 % (f, rtype, state, _hashornull(hash)))
1458 % (f, rtype, state, _hashornull(hash)))
1459 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1459 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1460 ui.write((' ancestor path: %s (node %s)\n')
1460 ui.write((' ancestor path: %s (node %s)\n')
1461 % (afile, _hashornull(anode)))
1461 % (afile, _hashornull(anode)))
1462 ui.write((' other path: %s (node %s)\n')
1462 ui.write((' other path: %s (node %s)\n')
1463 % (ofile, _hashornull(onode)))
1463 % (ofile, _hashornull(onode)))
1464 elif rtype == 'f':
1464 elif rtype == 'f':
1465 filename, rawextras = record.split('\0', 1)
1465 filename, rawextras = record.split('\0', 1)
1466 extras = rawextras.split('\0')
1466 extras = rawextras.split('\0')
1467 i = 0
1467 i = 0
1468 extrastrings = []
1468 extrastrings = []
1469 while i < len(extras):
1469 while i < len(extras):
1470 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1470 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1471 i += 2
1471 i += 2
1472
1472
1473 ui.write(('file extras: %s (%s)\n')
1473 ui.write(('file extras: %s (%s)\n')
1474 % (filename, ', '.join(extrastrings)))
1474 % (filename, ', '.join(extrastrings)))
1475 elif rtype == 'l':
1475 elif rtype == 'l':
1476 labels = record.split('\0', 2)
1476 labels = record.split('\0', 2)
1477 labels = [l for l in labels if len(l) > 0]
1477 labels = [l for l in labels if len(l) > 0]
1478 ui.write(('labels:\n'))
1478 ui.write(('labels:\n'))
1479 ui.write((' local: %s\n' % labels[0]))
1479 ui.write((' local: %s\n' % labels[0]))
1480 ui.write((' other: %s\n' % labels[1]))
1480 ui.write((' other: %s\n' % labels[1]))
1481 if len(labels) > 2:
1481 if len(labels) > 2:
1482 ui.write((' base: %s\n' % labels[2]))
1482 ui.write((' base: %s\n' % labels[2]))
1483 else:
1483 else:
1484 ui.write(('unrecognized entry: %s\t%s\n')
1484 ui.write(('unrecognized entry: %s\t%s\n')
1485 % (rtype, record.replace('\0', '\t')))
1485 % (rtype, record.replace('\0', '\t')))
1486
1486
1487 # Avoid mergestate.read() since it may raise an exception for unsupported
1487 # Avoid mergestate.read() since it may raise an exception for unsupported
1488 # merge state records. We shouldn't be doing this, but this is OK since this
1488 # merge state records. We shouldn't be doing this, but this is OK since this
1489 # command is pretty low-level.
1489 # command is pretty low-level.
1490 ms = mergemod.mergestate(repo)
1490 ms = mergemod.mergestate(repo)
1491
1491
1492 # sort so that reasonable information is on top
1492 # sort so that reasonable information is on top
1493 v1records = ms._readrecordsv1()
1493 v1records = ms._readrecordsv1()
1494 v2records = ms._readrecordsv2()
1494 v2records = ms._readrecordsv2()
1495 order = 'LOml'
1495 order = 'LOml'
1496 def key(r):
1496 def key(r):
1497 idx = order.find(r[0])
1497 idx = order.find(r[0])
1498 if idx == -1:
1498 if idx == -1:
1499 return (1, r[1])
1499 return (1, r[1])
1500 else:
1500 else:
1501 return (0, idx)
1501 return (0, idx)
1502 v1records.sort(key=key)
1502 v1records.sort(key=key)
1503 v2records.sort(key=key)
1503 v2records.sort(key=key)
1504
1504
1505 if not v1records and not v2records:
1505 if not v1records and not v2records:
1506 ui.write(('no merge state found\n'))
1506 ui.write(('no merge state found\n'))
1507 elif not v2records:
1507 elif not v2records:
1508 ui.note(('no version 2 merge state\n'))
1508 ui.note(('no version 2 merge state\n'))
1509 printrecords(1)
1509 printrecords(1)
1510 elif ms._v1v2match(v1records, v2records):
1510 elif ms._v1v2match(v1records, v2records):
1511 ui.note(('v1 and v2 states match: using v2\n'))
1511 ui.note(('v1 and v2 states match: using v2\n'))
1512 printrecords(2)
1512 printrecords(2)
1513 else:
1513 else:
1514 ui.note(('v1 and v2 states mismatch: using v1\n'))
1514 ui.note(('v1 and v2 states mismatch: using v1\n'))
1515 printrecords(1)
1515 printrecords(1)
1516 if ui.verbose:
1516 if ui.verbose:
1517 printrecords(2)
1517 printrecords(2)
1518
1518
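# Illustrative output during a conflicted merge (editor's sketch; the file
# name and node values are placeholders):
#
#   $ hg debugmergestate
#   * version 2 records
#   local: <40-hex node of the local side>
#   other: <40-hex node of the other side>
#   file: foo.c (record type "F", state "u", hash <hash>)
#     local path: foo.c (flags "")
#     ancestor path: foo.c (node <hash>)
#     other path: foo.c (node <hash>)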
1519 @command('debugnamecomplete', [], _('NAME...'))
1519 @command('debugnamecomplete', [], _('NAME...'))
1520 def debugnamecomplete(ui, repo, *args):
1520 def debugnamecomplete(ui, repo, *args):
1521 '''complete "names" - tags, open branch names, bookmark names'''
1521 '''complete "names" - tags, open branch names, bookmark names'''
1522
1522
1523 names = set()
1523 names = set()
1524 # since we previously only listed open branches, we will handle that
1524 # since we previously only listed open branches, we will handle that
1525 # specially (after this for loop)
1525 # specially (after this for loop)
1526 for name, ns in repo.names.iteritems():
1526 for name, ns in repo.names.iteritems():
1527 if name != 'branches':
1527 if name != 'branches':
1528 names.update(ns.listnames(repo))
1528 names.update(ns.listnames(repo))
1529 names.update(tag for (tag, heads, tip, closed)
1529 names.update(tag for (tag, heads, tip, closed)
1530 in repo.branchmap().iterbranches() if not closed)
1530 in repo.branchmap().iterbranches() if not closed)
1531 completions = set()
1531 completions = set()
1532 if not args:
1532 if not args:
1533 args = ['']
1533 args = ['']
1534 for a in args:
1534 for a in args:
1535 completions.update(n for n in names if n.startswith(a))
1535 completions.update(n for n in names if n.startswith(a))
1536 ui.write('\n'.join(sorted(completions)))
1536 ui.write('\n'.join(sorted(completions)))
1537 ui.write('\n')
1537 ui.write('\n')
1538
1538
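# Illustrative usage (editor's sketch; the names depend on the repository):
#
#   $ hg debugnamecomplete re
#   release-1.0
#   review
#
# Matching tags, bookmarks and open branch names are printed one per line;
# with no arguments every known name is listed.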
1539 @command('debugobsolete',
1539 @command('debugobsolete',
1540 [('', 'flags', 0, _('markers flag')),
1540 [('', 'flags', 0, _('markers flag')),
1541 ('', 'record-parents', False,
1541 ('', 'record-parents', False,
1542 _('record parent information for the precursor')),
1542 _('record parent information for the precursor')),
1543 ('r', 'rev', [], _('display markers relevant to REV')),
1543 ('r', 'rev', [], _('display markers relevant to REV')),
1544 ('', 'exclusive', False, _('restrict display to markers only '
1544 ('', 'exclusive', False, _('restrict display to markers only '
1545 'relevant to REV')),
1545 'relevant to REV')),
1546 ('', 'index', False, _('display index of the marker')),
1546 ('', 'index', False, _('display index of the marker')),
1547 ('', 'delete', [], _('delete markers specified by indices')),
1547 ('', 'delete', [], _('delete markers specified by indices')),
1548 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1548 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1549 _('[OBSOLETED [REPLACEMENT ...]]'))
1549 _('[OBSOLETED [REPLACEMENT ...]]'))
1550 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1550 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1551 """create arbitrary obsolete marker
1551 """create arbitrary obsolete marker
1552
1552
1553 With no arguments, displays the list of obsolescence markers."""
1553 With no arguments, displays the list of obsolescence markers."""
1554
1554
1555 opts = pycompat.byteskwargs(opts)
1555 opts = pycompat.byteskwargs(opts)
1556
1556
1557 def parsenodeid(s):
1557 def parsenodeid(s):
1558 try:
1558 try:
1559 # We do not use revsingle/revrange functions here to accept
1559 # We do not use revsingle/revrange functions here to accept
1560 # arbitrary node identifiers, possibly not present in the
1560 # arbitrary node identifiers, possibly not present in the
1561 # local repository.
1561 # local repository.
1562 n = bin(s)
1562 n = bin(s)
1563 if len(n) != len(nullid):
1563 if len(n) != len(nullid):
1564 raise TypeError()
1564 raise TypeError()
1565 return n
1565 return n
1566 except TypeError:
1566 except TypeError:
1567 raise error.Abort('changeset references must be full hexadecimal '
1567 raise error.Abort('changeset references must be full hexadecimal '
1568 'node identifiers')
1568 'node identifiers')
1569
1569
1570 if opts.get('delete'):
1570 if opts.get('delete'):
1571 indices = []
1571 indices = []
1572 for v in opts.get('delete'):
1572 for v in opts.get('delete'):
1573 try:
1573 try:
1574 indices.append(int(v))
1574 indices.append(int(v))
1575 except ValueError:
1575 except ValueError:
1576 raise error.Abort(_('invalid index value: %r') % v,
1576 raise error.Abort(_('invalid index value: %r') % v,
1577 hint=_('use integers for indices'))
1577 hint=_('use integers for indices'))
1578
1578
1579 if repo.currenttransaction():
1579 if repo.currenttransaction():
1580 raise error.Abort(_('cannot delete obsmarkers in the middle '
1580 raise error.Abort(_('cannot delete obsmarkers in the middle '
1581 'of a transaction.'))
1581 'of a transaction.'))
1582
1582
1583 with repo.lock():
1583 with repo.lock():
1584 n = repair.deleteobsmarkers(repo.obsstore, indices)
1584 n = repair.deleteobsmarkers(repo.obsstore, indices)
1585 ui.write(_('deleted %i obsolescence markers\n') % n)
1585 ui.write(_('deleted %i obsolescence markers\n') % n)
1586
1586
1587 return
1587 return
1588
1588
1589 if precursor is not None:
1589 if precursor is not None:
1590 if opts['rev']:
1590 if opts['rev']:
1591 raise error.Abort('cannot select revision when creating marker')
1591 raise error.Abort('cannot select revision when creating marker')
1592 metadata = {}
1592 metadata = {}
1593 metadata['user'] = opts['user'] or ui.username()
1593 metadata['user'] = opts['user'] or ui.username()
1594 succs = tuple(parsenodeid(succ) for succ in successors)
1594 succs = tuple(parsenodeid(succ) for succ in successors)
1595 l = repo.lock()
1595 l = repo.lock()
1596 try:
1596 try:
1597 tr = repo.transaction('debugobsolete')
1597 tr = repo.transaction('debugobsolete')
1598 try:
1598 try:
1599 date = opts.get('date')
1599 date = opts.get('date')
1600 if date:
1600 if date:
1601 date = dateutil.parsedate(date)
1601 date = dateutil.parsedate(date)
1602 else:
1602 else:
1603 date = None
1603 date = None
1604 prec = parsenodeid(precursor)
1604 prec = parsenodeid(precursor)
1605 parents = None
1605 parents = None
1606 if opts['record_parents']:
1606 if opts['record_parents']:
1607 if prec not in repo.unfiltered():
1607 if prec not in repo.unfiltered():
1608 raise error.Abort('cannot use --record-parents on '
1608 raise error.Abort('cannot use --record-parents on '
1609 'unknown changesets')
1609 'unknown changesets')
1610 parents = repo.unfiltered()[prec].parents()
1610 parents = repo.unfiltered()[prec].parents()
1611 parents = tuple(p.node() for p in parents)
1611 parents = tuple(p.node() for p in parents)
1612 repo.obsstore.create(tr, prec, succs, opts['flags'],
1612 repo.obsstore.create(tr, prec, succs, opts['flags'],
1613 parents=parents, date=date,
1613 parents=parents, date=date,
1614 metadata=metadata, ui=ui)
1614 metadata=metadata, ui=ui)
1615 tr.close()
1615 tr.close()
1616 except ValueError as exc:
1616 except ValueError as exc:
1617 raise error.Abort(_('bad obsmarker input: %s') %
1617 raise error.Abort(_('bad obsmarker input: %s') %
1618 pycompat.bytestr(exc))
1618 pycompat.bytestr(exc))
1619 finally:
1619 finally:
1620 tr.release()
1620 tr.release()
1621 finally:
1621 finally:
1622 l.release()
1622 l.release()
1623 else:
1623 else:
1624 if opts['rev']:
1624 if opts['rev']:
1625 revs = scmutil.revrange(repo, opts['rev'])
1625 revs = scmutil.revrange(repo, opts['rev'])
1626 nodes = [repo[r].node() for r in revs]
1626 nodes = [repo[r].node() for r in revs]
1627 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1627 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1628 exclusive=opts['exclusive']))
1628 exclusive=opts['exclusive']))
1629 markers.sort(key=lambda x: x._data)
1629 markers.sort(key=lambda x: x._data)
1630 else:
1630 else:
1631 markers = obsutil.getmarkers(repo)
1631 markers = obsutil.getmarkers(repo)
1632
1632
1633 markerstoiter = markers
1633 markerstoiter = markers
1634 isrelevant = lambda m: True
1634 isrelevant = lambda m: True
1635 if opts.get('rev') and opts.get('index'):
1635 if opts.get('rev') and opts.get('index'):
1636 markerstoiter = obsutil.getmarkers(repo)
1636 markerstoiter = obsutil.getmarkers(repo)
1637 markerset = set(markers)
1637 markerset = set(markers)
1638 isrelevant = lambda m: m in markerset
1638 isrelevant = lambda m: m in markerset
1639
1639
1640 fm = ui.formatter('debugobsolete', opts)
1640 fm = ui.formatter('debugobsolete', opts)
1641 for i, m in enumerate(markerstoiter):
1641 for i, m in enumerate(markerstoiter):
1642 if not isrelevant(m):
1642 if not isrelevant(m):
1643 # marker can be irrelevant when we're iterating over a set
1643 # marker can be irrelevant when we're iterating over a set
1644 # of markers (markerstoiter) which is bigger than the set
1644 # of markers (markerstoiter) which is bigger than the set
1645 # of markers we want to display (markers).
1645 # of markers we want to display (markers).
1646 # This can happen if both --index and --rev options are
1646 # This can happen if both --index and --rev options are
1647 # provided and thus we need to iterate over all of the markers
1647 # provided and thus we need to iterate over all of the markers
1648 # to get the correct indices, but only display the ones that
1648 # to get the correct indices, but only display the ones that
1649 # are relevant to --rev value
1649 # are relevant to --rev value
1650 continue
1650 continue
1651 fm.startitem()
1651 fm.startitem()
1652 ind = i if opts.get('index') else None
1652 ind = i if opts.get('index') else None
1653 cmdutil.showmarker(fm, m, index=ind)
1653 cmdutil.showmarker(fm, m, index=ind)
1654 fm.end()
1654 fm.end()
1655
1655
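# Illustrative usage (editor's sketch; the node ids are placeholders):
#
#   $ hg debugobsolete 0123456789012345678901234567890123456789 \
#       abcdefabcdefabcdefabcdefabcdefabcdefabcd   # create one marker
#
#   $ hg debugobsolete                     # list every marker
#   $ hg debugobsolete --rev . --index     # markers relevant to "." with indices
#   $ hg debugobsolete --delete 0          # delete the marker at index 0
#
# Changeset references must be full 40-hex node ids, as enforced by
# parsenodeid() above.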
1656 @command('debugpathcomplete',
1656 @command('debugpathcomplete',
1657 [('f', 'full', None, _('complete an entire path')),
1657 [('f', 'full', None, _('complete an entire path')),
1658 ('n', 'normal', None, _('show only normal files')),
1658 ('n', 'normal', None, _('show only normal files')),
1659 ('a', 'added', None, _('show only added files')),
1659 ('a', 'added', None, _('show only added files')),
1660 ('r', 'removed', None, _('show only removed files'))],
1660 ('r', 'removed', None, _('show only removed files'))],
1661 _('FILESPEC...'))
1661 _('FILESPEC...'))
1662 def debugpathcomplete(ui, repo, *specs, **opts):
1662 def debugpathcomplete(ui, repo, *specs, **opts):
1663 '''complete part or all of a tracked path
1663 '''complete part or all of a tracked path
1664
1664
1665 This command supports shells that offer path name completion. It
1665 This command supports shells that offer path name completion. It
1666 currently completes only files already known to the dirstate.
1666 currently completes only files already known to the dirstate.
1667
1667
1668 Completion extends only to the next path segment unless
1668 Completion extends only to the next path segment unless
1669 --full is specified, in which case entire paths are used.'''
1669 --full is specified, in which case entire paths are used.'''
1670
1670
1671 def complete(path, acceptable):
1671 def complete(path, acceptable):
1672 dirstate = repo.dirstate
1672 dirstate = repo.dirstate
1673 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1673 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1674 rootdir = repo.root + pycompat.ossep
1674 rootdir = repo.root + pycompat.ossep
1675 if spec != repo.root and not spec.startswith(rootdir):
1675 if spec != repo.root and not spec.startswith(rootdir):
1676 return [], []
1676 return [], []
1677 if os.path.isdir(spec):
1677 if os.path.isdir(spec):
1678 spec += '/'
1678 spec += '/'
1679 spec = spec[len(rootdir):]
1679 spec = spec[len(rootdir):]
1680 fixpaths = pycompat.ossep != '/'
1680 fixpaths = pycompat.ossep != '/'
1681 if fixpaths:
1681 if fixpaths:
1682 spec = spec.replace(pycompat.ossep, '/')
1682 spec = spec.replace(pycompat.ossep, '/')
1683 speclen = len(spec)
1683 speclen = len(spec)
1684 fullpaths = opts[r'full']
1684 fullpaths = opts[r'full']
1685 files, dirs = set(), set()
1685 files, dirs = set(), set()
1686 adddir, addfile = dirs.add, files.add
1686 adddir, addfile = dirs.add, files.add
1687 for f, st in dirstate.iteritems():
1687 for f, st in dirstate.iteritems():
1688 if f.startswith(spec) and st[0] in acceptable:
1688 if f.startswith(spec) and st[0] in acceptable:
1689 if fixpaths:
1689 if fixpaths:
1690 f = f.replace('/', pycompat.ossep)
1690 f = f.replace('/', pycompat.ossep)
1691 if fullpaths:
1691 if fullpaths:
1692 addfile(f)
1692 addfile(f)
1693 continue
1693 continue
1694 s = f.find(pycompat.ossep, speclen)
1694 s = f.find(pycompat.ossep, speclen)
1695 if s >= 0:
1695 if s >= 0:
1696 adddir(f[:s])
1696 adddir(f[:s])
1697 else:
1697 else:
1698 addfile(f)
1698 addfile(f)
1699 return files, dirs
1699 return files, dirs
1700
1700
1701 acceptable = ''
1701 acceptable = ''
1702 if opts[r'normal']:
1702 if opts[r'normal']:
1703 acceptable += 'nm'
1703 acceptable += 'nm'
1704 if opts[r'added']:
1704 if opts[r'added']:
1705 acceptable += 'a'
1705 acceptable += 'a'
1706 if opts[r'removed']:
1706 if opts[r'removed']:
1707 acceptable += 'r'
1707 acceptable += 'r'
1708 cwd = repo.getcwd()
1708 cwd = repo.getcwd()
1709 if not specs:
1709 if not specs:
1710 specs = ['.']
1710 specs = ['.']
1711
1711
1712 files, dirs = set(), set()
1712 files, dirs = set(), set()
1713 for spec in specs:
1713 for spec in specs:
1714 f, d = complete(spec, acceptable or 'nmar')
1714 f, d = complete(spec, acceptable or 'nmar')
1715 files.update(f)
1715 files.update(f)
1716 dirs.update(d)
1716 dirs.update(d)
1717 files.update(dirs)
1717 files.update(dirs)
1718 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1718 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1719 ui.write('\n')
1719 ui.write('\n')
1720
1720
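# Illustrative usage (editor's sketch; the paths are made up):
#
#   $ hg debugpathcomplete mercurial/d
#   mercurial/debugcommands.py
#   mercurial/dirstate.py
#
# Without --full, completion stops at the next path separator, so a deep
# tree completes one directory level at a time.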
1721 @command('debugpeer', [], _('PATH'), norepo=True)
1721 @command('debugpeer', [], _('PATH'), norepo=True)
1722 def debugpeer(ui, path):
1722 def debugpeer(ui, path):
1723 """establish a connection to a peer repository"""
1723 """establish a connection to a peer repository"""
1724 # Always enable peer request logging. Requires --debug to display
1724 # Always enable peer request logging. Requires --debug to display
1725 # though.
1725 # though.
1726 overrides = {
1726 overrides = {
1727 ('devel', 'debug.peer-request'): True,
1727 ('devel', 'debug.peer-request'): True,
1728 }
1728 }
1729
1729
1730 with ui.configoverride(overrides):
1730 with ui.configoverride(overrides):
1731 peer = hg.peer(ui, {}, path)
1731 peer = hg.peer(ui, {}, path)
1732
1732
1733 local = peer.local() is not None
1733 local = peer.local() is not None
1734 canpush = peer.canpush()
1734 canpush = peer.canpush()
1735
1735
1736 ui.write(_('url: %s\n') % peer.url())
1736 ui.write(_('url: %s\n') % peer.url())
1737 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1737 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1738 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1738 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1739
1739
1740 @command('debugpickmergetool',
1740 @command('debugpickmergetool',
1741 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1741 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1742 ('', 'changedelete', None, _('emulate merging change and delete')),
1742 ('', 'changedelete', None, _('emulate merging change and delete')),
1743 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1743 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1744 _('[PATTERN]...'),
1744 _('[PATTERN]...'),
1745 inferrepo=True)
1745 inferrepo=True)
1746 def debugpickmergetool(ui, repo, *pats, **opts):
1746 def debugpickmergetool(ui, repo, *pats, **opts):
1747 """examine which merge tool is chosen for specified file
1747 """examine which merge tool is chosen for specified file
1748
1748
1749 As described in :hg:`help merge-tools`, Mercurial examines the
1749 As described in :hg:`help merge-tools`, Mercurial examines the
1750 configurations below, in this order, to decide which merge tool is
1750 configurations below, in this order, to decide which merge tool is
1751 chosen for the specified file.
1751 chosen for the specified file.
1752
1752
1753 1. ``--tool`` option
1753 1. ``--tool`` option
1754 2. ``HGMERGE`` environment variable
1754 2. ``HGMERGE`` environment variable
1755 3. configurations in ``merge-patterns`` section
1755 3. configurations in ``merge-patterns`` section
1756 4. configuration of ``ui.merge``
1756 4. configuration of ``ui.merge``
1757 5. configurations in ``merge-tools`` section
1757 5. configurations in ``merge-tools`` section
1758 6. ``hgmerge`` tool (for historical reasons only)
1758 6. ``hgmerge`` tool (for historical reasons only)
1759 7. default tool for fallback (``:merge`` or ``:prompt``)
1759 7. default tool for fallback (``:merge`` or ``:prompt``)
1760
1760
1761 This command writes out the examination result in the style below::
1761 This command writes out the examination result in the style below::
1762
1762
1763 FILE = MERGETOOL
1763 FILE = MERGETOOL
1764
1764
1765 By default, all files known in the first parent context of the
1765 By default, all files known in the first parent context of the
1766 working directory are examined. Use file patterns and/or -I/-X
1766 working directory are examined. Use file patterns and/or -I/-X
1767 options to limit target files. -r/--rev is also useful to examine
1767 options to limit target files. -r/--rev is also useful to examine
1768 files in another context without actually updating to it.
1768 files in another context without actually updating to it.
1769
1769
1770 With --debug, this command also shows warning messages while matching
1770 With --debug, this command also shows warning messages while matching
1771 against ``merge-patterns`` and related configurations. It is recommended
1771 against ``merge-patterns`` and related configurations. It is recommended
1772 to use this option with explicit file patterns and/or -I/-X options,
1772 to use this option with explicit file patterns and/or -I/-X options,
1773 because this option increases the amount of output per file according
1773 because this option increases the amount of output per file according
1774 to the configurations in hgrc.
1774 to the configurations in hgrc.
1775
1775
1776 With -v/--verbose, this command first shows the configurations
1776 With -v/--verbose, this command first shows the configurations
1777 below (only those that are specified).
1777 below (only those that are specified).
1778
1778
1779 - ``--tool`` option
1779 - ``--tool`` option
1780 - ``HGMERGE`` environment variable
1780 - ``HGMERGE`` environment variable
1781 - configuration of ``ui.merge``
1781 - configuration of ``ui.merge``
1782
1782
1783 If a merge tool is chosen before matching against
1783 If a merge tool is chosen before matching against
1784 ``merge-patterns``, this command can't show any helpful
1784 ``merge-patterns``, this command can't show any helpful
1785 information, even with --debug. In such a case, the information
1785 information, even with --debug. In such a case, the information
1786 above is useful for understanding why a merge tool was chosen.
1786 above is useful for understanding why a merge tool was chosen.
1787 """
1787 """
1788 opts = pycompat.byteskwargs(opts)
1788 opts = pycompat.byteskwargs(opts)
1789 overrides = {}
1789 overrides = {}
1790 if opts['tool']:
1790 if opts['tool']:
1791 overrides[('ui', 'forcemerge')] = opts['tool']
1791 overrides[('ui', 'forcemerge')] = opts['tool']
1792 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1792 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1793
1793
1794 with ui.configoverride(overrides, 'debugmergepatterns'):
1794 with ui.configoverride(overrides, 'debugmergepatterns'):
1795 hgmerge = encoding.environ.get("HGMERGE")
1795 hgmerge = encoding.environ.get("HGMERGE")
1796 if hgmerge is not None:
1796 if hgmerge is not None:
1797 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1797 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1798 uimerge = ui.config("ui", "merge")
1798 uimerge = ui.config("ui", "merge")
1799 if uimerge:
1799 if uimerge:
1800 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1800 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1801
1801
1802 ctx = scmutil.revsingle(repo, opts.get('rev'))
1802 ctx = scmutil.revsingle(repo, opts.get('rev'))
1803 m = scmutil.match(ctx, pats, opts)
1803 m = scmutil.match(ctx, pats, opts)
1804 changedelete = opts['changedelete']
1804 changedelete = opts['changedelete']
1805 for path in ctx.walk(m):
1805 for path in ctx.walk(m):
1806 fctx = ctx[path]
1806 fctx = ctx[path]
1807 try:
1807 try:
1808 if not ui.debugflag:
1808 if not ui.debugflag:
1809 ui.pushbuffer(error=True)
1809 ui.pushbuffer(error=True)
1810 tool, toolpath = filemerge._picktool(repo, ui, path,
1810 tool, toolpath = filemerge._picktool(repo, ui, path,
1811 fctx.isbinary(),
1811 fctx.isbinary(),
1812 'l' in fctx.flags(),
1812 'l' in fctx.flags(),
1813 changedelete)
1813 changedelete)
1814 finally:
1814 finally:
1815 if not ui.debugflag:
1815 if not ui.debugflag:
1816 ui.popbuffer()
1816 ui.popbuffer()
1817 ui.write(('%s = %s\n') % (path, tool))
1817 ui.write(('%s = %s\n') % (path, tool))
1818
1818
1819 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1819 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1820 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1820 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1821 '''access the pushkey key/value protocol
1821 '''access the pushkey key/value protocol
1822
1822
1823 With two args, list the keys in the given namespace.
1823 With two args, list the keys in the given namespace.
1824
1824
1825 With five args, set a key to new if it currently is set to old.
1825 With five args, set a key to new if it currently is set to old.
1826 Reports success or failure.
1826 Reports success or failure.
1827 '''
1827 '''
1828
1828
1829 target = hg.peer(ui, {}, repopath)
1829 target = hg.peer(ui, {}, repopath)
1830 if keyinfo:
1830 if keyinfo:
1831 key, old, new = keyinfo
1831 key, old, new = keyinfo
1832 r = target.pushkey(namespace, key, old, new)
1832 r = target.pushkey(namespace, key, old, new)
1833 ui.status(pycompat.bytestr(r) + '\n')
1833 ui.status(pycompat.bytestr(r) + '\n')
1834 return not r
1834 return not r
1835 else:
1835 else:
1836 for k, v in sorted(target.listkeys(namespace).iteritems()):
1836 for k, v in sorted(target.listkeys(namespace).iteritems()):
1837 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1837 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1838 stringutil.escapestr(v)))
1838 stringutil.escapestr(v)))
1839
1839
1840 @command('debugpvec', [], _('A B'))
1840 @command('debugpvec', [], _('A B'))
1841 def debugpvec(ui, repo, a, b=None):
1841 def debugpvec(ui, repo, a, b=None):
1842 ca = scmutil.revsingle(repo, a)
1842 ca = scmutil.revsingle(repo, a)
1843 cb = scmutil.revsingle(repo, b)
1843 cb = scmutil.revsingle(repo, b)
1844 pa = pvec.ctxpvec(ca)
1844 pa = pvec.ctxpvec(ca)
1845 pb = pvec.ctxpvec(cb)
1845 pb = pvec.ctxpvec(cb)
1846 if pa == pb:
1846 if pa == pb:
1847 rel = "="
1847 rel = "="
1848 elif pa > pb:
1848 elif pa > pb:
1849 rel = ">"
1849 rel = ">"
1850 elif pa < pb:
1850 elif pa < pb:
1851 rel = "<"
1851 rel = "<"
1852 elif pa | pb:
1852 elif pa | pb:
1853 rel = "|"
1853 rel = "|"
1854 ui.write(_("a: %s\n") % pa)
1854 ui.write(_("a: %s\n") % pa)
1855 ui.write(_("b: %s\n") % pb)
1855 ui.write(_("b: %s\n") % pb)
1856 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1856 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1857 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1857 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1858 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1858 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1859 pa.distance(pb), rel))
1859 pa.distance(pb), rel))
1860
1860
1861 @command('debugrebuilddirstate|debugrebuildstate',
1861 @command('debugrebuilddirstate|debugrebuildstate',
1862 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1862 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1863 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1863 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1864 'the working copy parent')),
1864 'the working copy parent')),
1865 ],
1865 ],
1866 _('[-r REV]'))
1866 _('[-r REV]'))
1867 def debugrebuilddirstate(ui, repo, rev, **opts):
1867 def debugrebuilddirstate(ui, repo, rev, **opts):
1868 """rebuild the dirstate as it would look like for the given revision
1868 """rebuild the dirstate as it would look like for the given revision
1869
1869
1870 If no revision is specified, the working directory's first parent is used.
1870 If no revision is specified, the working directory's first parent is used.
1871
1871
1872 The dirstate will be set to the files of the given revision.
1872 The dirstate will be set to the files of the given revision.
1873 The actual working directory content or existing dirstate
1873 The actual working directory content or existing dirstate
1874 information such as adds or removes is not considered.
1874 information such as adds or removes is not considered.
1875
1875
1876 ``minimal`` will only rebuild the dirstate status for files that claim to be
1876 ``minimal`` will only rebuild the dirstate status for files that claim to be
1877 tracked but are not in the parent manifest, or that exist in the parent
1877 tracked but are not in the parent manifest, or that exist in the parent
1878 manifest but are not in the dirstate. It will not change adds, removes, or
1878 manifest but are not in the dirstate. It will not change adds, removes, or
1879 modified files that are in the working copy parent.
1879 modified files that are in the working copy parent.
1880
1880
1881 One use of this command is to make the next :hg:`status` invocation
1881 One use of this command is to make the next :hg:`status` invocation
1882 check the actual file content.
1882 check the actual file content.
1883 """
1883 """
1884 ctx = scmutil.revsingle(repo, rev)
1884 ctx = scmutil.revsingle(repo, rev)
1885 with repo.wlock():
1885 with repo.wlock():
1886 dirstate = repo.dirstate
1886 dirstate = repo.dirstate
1887 changedfiles = None
1887 changedfiles = None
1888 # See command doc for what minimal does.
1888 # See command doc for what minimal does.
1889 if opts.get(r'minimal'):
1889 if opts.get(r'minimal'):
1890 manifestfiles = set(ctx.manifest().keys())
1890 manifestfiles = set(ctx.manifest().keys())
1891 dirstatefiles = set(dirstate)
1891 dirstatefiles = set(dirstate)
1892 manifestonly = manifestfiles - dirstatefiles
1892 manifestonly = manifestfiles - dirstatefiles
1893 dsonly = dirstatefiles - manifestfiles
1893 dsonly = dirstatefiles - manifestfiles
1894 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1894 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1895 changedfiles = manifestonly | dsnotadded
1895 changedfiles = manifestonly | dsnotadded
1896
1896
1897 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1897 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1898
1898
1899 @command('debugrebuildfncache', [], '')
1899 @command('debugrebuildfncache', [], '')
1900 def debugrebuildfncache(ui, repo):
1900 def debugrebuildfncache(ui, repo):
1901 """rebuild the fncache file"""
1901 """rebuild the fncache file"""
1902 repair.rebuildfncache(ui, repo)
1902 repair.rebuildfncache(ui, repo)
1903
1903
1904 @command('debugrename',
1904 @command('debugrename',
1905 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1905 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1906 _('[-r REV] FILE'))
1906 _('[-r REV] FILE'))
1907 def debugrename(ui, repo, file1, *pats, **opts):
1907 def debugrename(ui, repo, file1, *pats, **opts):
1908 """dump rename information"""
1908 """dump rename information"""
1909
1909
1910 opts = pycompat.byteskwargs(opts)
1910 opts = pycompat.byteskwargs(opts)
1911 ctx = scmutil.revsingle(repo, opts.get('rev'))
1911 ctx = scmutil.revsingle(repo, opts.get('rev'))
1912 m = scmutil.match(ctx, (file1,) + pats, opts)
1912 m = scmutil.match(ctx, (file1,) + pats, opts)
1913 for abs in ctx.walk(m):
1913 for abs in ctx.walk(m):
1914 fctx = ctx[abs]
1914 fctx = ctx[abs]
1915 o = fctx.filelog().renamed(fctx.filenode())
1915 o = fctx.filelog().renamed(fctx.filenode())
1916 rel = m.rel(abs)
1916 rel = m.rel(abs)
1917 if o:
1917 if o:
1918 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1918 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1919 else:
1919 else:
1920 ui.write(_("%s not renamed\n") % rel)
1920 ui.write(_("%s not renamed\n") % rel)
1921
1921
1922 @command('debugrevlog', cmdutil.debugrevlogopts +
1922 @command('debugrevlog', cmdutil.debugrevlogopts +
1923 [('d', 'dump', False, _('dump index data'))],
1923 [('d', 'dump', False, _('dump index data'))],
1924 _('-c|-m|FILE'),
1924 _('-c|-m|FILE'),
1925 optionalrepo=True)
1925 optionalrepo=True)
1926 def debugrevlog(ui, repo, file_=None, **opts):
1926 def debugrevlog(ui, repo, file_=None, **opts):
1927 """show data and statistics about a revlog"""
1927 """show data and statistics about a revlog"""
1928 opts = pycompat.byteskwargs(opts)
1928 opts = pycompat.byteskwargs(opts)
1929 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1929 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1930
1930
1931 if opts.get("dump"):
1931 if opts.get("dump"):
1932 numrevs = len(r)
1932 numrevs = len(r)
1933 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1933 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1934 " rawsize totalsize compression heads chainlen\n"))
1934 " rawsize totalsize compression heads chainlen\n"))
1935 ts = 0
1935 ts = 0
1936 heads = set()
1936 heads = set()
1937
1937
1938 for rev in xrange(numrevs):
1938 for rev in xrange(numrevs):
1939 dbase = r.deltaparent(rev)
1939 dbase = r.deltaparent(rev)
1940 if dbase == -1:
1940 if dbase == -1:
1941 dbase = rev
1941 dbase = rev
1942 cbase = r.chainbase(rev)
1942 cbase = r.chainbase(rev)
1943 clen = r.chainlen(rev)
1943 clen = r.chainlen(rev)
1944 p1, p2 = r.parentrevs(rev)
1944 p1, p2 = r.parentrevs(rev)
1945 rs = r.rawsize(rev)
1945 rs = r.rawsize(rev)
1946 ts = ts + rs
1946 ts = ts + rs
1947 heads -= set(r.parentrevs(rev))
1947 heads -= set(r.parentrevs(rev))
1948 heads.add(rev)
1948 heads.add(rev)
1949 try:
1949 try:
1950 compression = ts / r.end(rev)
1950 compression = ts / r.end(rev)
1951 except ZeroDivisionError:
1951 except ZeroDivisionError:
1952 compression = 0
1952 compression = 0
1953 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1953 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1954 "%11d %5d %8d\n" %
1954 "%11d %5d %8d\n" %
1955 (rev, p1, p2, r.start(rev), r.end(rev),
1955 (rev, p1, p2, r.start(rev), r.end(rev),
1956 r.start(dbase), r.start(cbase),
1956 r.start(dbase), r.start(cbase),
1957 r.start(p1), r.start(p2),
1957 r.start(p1), r.start(p2),
1958 rs, ts, compression, len(heads), clen))
1958 rs, ts, compression, len(heads), clen))
1959 return 0
1959 return 0
1960
1960
1961 v = r.version
1961 v = r.version
1962 format = v & 0xFFFF
1962 format = v & 0xFFFF
1963 flags = []
1963 flags = []
1964 gdelta = False
1964 gdelta = False
1965 if v & revlog.FLAG_INLINE_DATA:
1965 if v & revlog.FLAG_INLINE_DATA:
1966 flags.append('inline')
1966 flags.append('inline')
1967 if v & revlog.FLAG_GENERALDELTA:
1967 if v & revlog.FLAG_GENERALDELTA:
1968 gdelta = True
1968 gdelta = True
1969 flags.append('generaldelta')
1969 flags.append('generaldelta')
1970 if not flags:
1970 if not flags:
1971 flags = ['(none)']
1971 flags = ['(none)']
1972
1972
1973 nummerges = 0
1973 nummerges = 0
1974 numfull = 0
1974 numfull = 0
1975 numprev = 0
1975 numprev = 0
1976 nump1 = 0
1976 nump1 = 0
1977 nump2 = 0
1977 nump2 = 0
1978 numother = 0
1978 numother = 0
1979 nump1prev = 0
1979 nump1prev = 0
1980 nump2prev = 0
1980 nump2prev = 0
1981 chainlengths = []
1981 chainlengths = []
1982 chainbases = []
1982 chainbases = []
1983 chainspans = []
1983 chainspans = []
1984
1984
1985 datasize = [None, 0, 0]
1985 datasize = [None, 0, 0]
1986 fullsize = [None, 0, 0]
1986 fullsize = [None, 0, 0]
1987 deltasize = [None, 0, 0]
1987 deltasize = [None, 0, 0]
1988 chunktypecounts = {}
1988 chunktypecounts = {}
1989 chunktypesizes = {}
1989 chunktypesizes = {}
1990
1990
1991 def addsize(size, l):
1991 def addsize(size, l):
1992 if l[0] is None or size < l[0]:
1992 if l[0] is None or size < l[0]:
1993 l[0] = size
1993 l[0] = size
1994 if size > l[1]:
1994 if size > l[1]:
1995 l[1] = size
1995 l[1] = size
1996 l[2] += size
1996 l[2] += size
1997
1997
1998 numrevs = len(r)
1998 numrevs = len(r)
1999 for rev in xrange(numrevs):
1999 for rev in xrange(numrevs):
2000 p1, p2 = r.parentrevs(rev)
2000 p1, p2 = r.parentrevs(rev)
2001 delta = r.deltaparent(rev)
2001 delta = r.deltaparent(rev)
2002 if format > 0:
2002 if format > 0:
2003 addsize(r.rawsize(rev), datasize)
2003 addsize(r.rawsize(rev), datasize)
2004 if p2 != nullrev:
2004 if p2 != nullrev:
2005 nummerges += 1
2005 nummerges += 1
2006 size = r.length(rev)
2006 size = r.length(rev)
2007 if delta == nullrev:
2007 if delta == nullrev:
2008 chainlengths.append(0)
2008 chainlengths.append(0)
2009 chainbases.append(r.start(rev))
2009 chainbases.append(r.start(rev))
2010 chainspans.append(size)
2010 chainspans.append(size)
2011 numfull += 1
2011 numfull += 1
2012 addsize(size, fullsize)
2012 addsize(size, fullsize)
2013 else:
2013 else:
2014 chainlengths.append(chainlengths[delta] + 1)
2014 chainlengths.append(chainlengths[delta] + 1)
2015 baseaddr = chainbases[delta]
2015 baseaddr = chainbases[delta]
2016 revaddr = r.start(rev)
2016 revaddr = r.start(rev)
2017 chainbases.append(baseaddr)
2017 chainbases.append(baseaddr)
2018 chainspans.append((revaddr - baseaddr) + size)
2018 chainspans.append((revaddr - baseaddr) + size)
2019 addsize(size, deltasize)
2019 addsize(size, deltasize)
2020 if delta == rev - 1:
2020 if delta == rev - 1:
2021 numprev += 1
2021 numprev += 1
2022 if delta == p1:
2022 if delta == p1:
2023 nump1prev += 1
2023 nump1prev += 1
2024 elif delta == p2:
2024 elif delta == p2:
2025 nump2prev += 1
2025 nump2prev += 1
2026 elif delta == p1:
2026 elif delta == p1:
2027 nump1 += 1
2027 nump1 += 1
2028 elif delta == p2:
2028 elif delta == p2:
2029 nump2 += 1
2029 nump2 += 1
2030 elif delta != nullrev:
2030 elif delta != nullrev:
2031 numother += 1
2031 numother += 1
2032
2032
2033 # Obtain data on the raw chunks in the revlog.
2033 # Obtain data on the raw chunks in the revlog.
2034 segment = r._getsegmentforrevs(rev, rev)[1]
2034 segment = r._getsegmentforrevs(rev, rev)[1]
2035 if segment:
2035 if segment:
2036 chunktype = bytes(segment[0:1])
2036 chunktype = bytes(segment[0:1])
2037 else:
2037 else:
2038 chunktype = 'empty'
2038 chunktype = 'empty'
2039
2039
2040 if chunktype not in chunktypecounts:
2040 if chunktype not in chunktypecounts:
2041 chunktypecounts[chunktype] = 0
2041 chunktypecounts[chunktype] = 0
2042 chunktypesizes[chunktype] = 0
2042 chunktypesizes[chunktype] = 0
2043
2043
2044 chunktypecounts[chunktype] += 1
2044 chunktypecounts[chunktype] += 1
2045 chunktypesizes[chunktype] += size
2045 chunktypesizes[chunktype] += size
2046
2046
2047 # Adjust size min value for empty cases
2047 # Adjust size min value for empty cases
2048 for size in (datasize, fullsize, deltasize):
2048 for size in (datasize, fullsize, deltasize):
2049 if size[0] is None:
2049 if size[0] is None:
2050 size[0] = 0
2050 size[0] = 0
2051
2051
2052 numdeltas = numrevs - numfull
2052 numdeltas = numrevs - numfull
2053 numoprev = numprev - nump1prev - nump2prev
2053 numoprev = numprev - nump1prev - nump2prev
2054 totalrawsize = datasize[2]
2054 totalrawsize = datasize[2]
2055 datasize[2] /= numrevs
2055 datasize[2] /= numrevs
2056 fulltotal = fullsize[2]
2056 fulltotal = fullsize[2]
2057 fullsize[2] /= numfull
2057 fullsize[2] /= numfull
2058 deltatotal = deltasize[2]
2058 deltatotal = deltasize[2]
2059 if numrevs - numfull > 0:
2059 if numrevs - numfull > 0:
2060 deltasize[2] /= numrevs - numfull
2060 deltasize[2] /= numrevs - numfull
2061 totalsize = fulltotal + deltatotal
2061 totalsize = fulltotal + deltatotal
2062 avgchainlen = sum(chainlengths) / numrevs
2062 avgchainlen = sum(chainlengths) / numrevs
2063 maxchainlen = max(chainlengths)
2063 maxchainlen = max(chainlengths)
2064 maxchainspan = max(chainspans)
2064 maxchainspan = max(chainspans)
2065 compratio = 1
2065 compratio = 1
2066 if totalsize:
2066 if totalsize:
2067 compratio = totalrawsize / totalsize
2067 compratio = totalrawsize / totalsize
2068
2068
2069 basedfmtstr = '%%%dd\n'
2069 basedfmtstr = '%%%dd\n'
2070 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2070 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2071
2071
2072 def dfmtstr(max):
2072 def dfmtstr(max):
2073 return basedfmtstr % len(str(max))
2073 return basedfmtstr % len(str(max))
2074 def pcfmtstr(max, padding=0):
2074 def pcfmtstr(max, padding=0):
2075 return basepcfmtstr % (len(str(max)), ' ' * padding)
2075 return basepcfmtstr % (len(str(max)), ' ' * padding)
2076
2076
2077 def pcfmt(value, total):
2077 def pcfmt(value, total):
2078 if total:
2078 if total:
2079 return (value, 100 * float(value) / total)
2079 return (value, 100 * float(value) / total)
2080 else:
2080 else:
2081 return value, 100.0
2081 return value, 100.0
2082
2082
2083 ui.write(('format : %d\n') % format)
2083 ui.write(('format : %d\n') % format)
2084 ui.write(('flags : %s\n') % ', '.join(flags))
2084 ui.write(('flags : %s\n') % ', '.join(flags))
2085
2085
2086 ui.write('\n')
2086 ui.write('\n')
2087 fmt = pcfmtstr(totalsize)
2087 fmt = pcfmtstr(totalsize)
2088 fmt2 = dfmtstr(totalsize)
2088 fmt2 = dfmtstr(totalsize)
2089 ui.write(('revisions : ') + fmt2 % numrevs)
2089 ui.write(('revisions : ') + fmt2 % numrevs)
2090 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2090 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2091 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2091 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2092 ui.write(('revisions : ') + fmt2 % numrevs)
2092 ui.write(('revisions : ') + fmt2 % numrevs)
2093 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2093 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2094 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2094 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2095 ui.write(('revision size : ') + fmt2 % totalsize)
2095 ui.write(('revision size : ') + fmt2 % totalsize)
2096 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2096 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2097 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2097 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2098
2098
2099 def fmtchunktype(chunktype):
2099 def fmtchunktype(chunktype):
2100 if chunktype == 'empty':
2100 if chunktype == 'empty':
2101 return ' %s : ' % chunktype
2101 return ' %s : ' % chunktype
2102 elif chunktype in pycompat.bytestr(string.ascii_letters):
2102 elif chunktype in pycompat.bytestr(string.ascii_letters):
2103 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2103 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2104 else:
2104 else:
2105 return ' 0x%s : ' % hex(chunktype)
2105 return ' 0x%s : ' % hex(chunktype)
2106
2106
2107 ui.write('\n')
2107 ui.write('\n')
2108 ui.write(('chunks : ') + fmt2 % numrevs)
2108 ui.write(('chunks : ') + fmt2 % numrevs)
2109 for chunktype in sorted(chunktypecounts):
2109 for chunktype in sorted(chunktypecounts):
2110 ui.write(fmtchunktype(chunktype))
2110 ui.write(fmtchunktype(chunktype))
2111 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2111 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2112 ui.write(('chunks size : ') + fmt2 % totalsize)
2112 ui.write(('chunks size : ') + fmt2 % totalsize)
2113 for chunktype in sorted(chunktypecounts):
2113 for chunktype in sorted(chunktypecounts):
2114 ui.write(fmtchunktype(chunktype))
2114 ui.write(fmtchunktype(chunktype))
2115 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2115 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2116
2116
2117 ui.write('\n')
2117 ui.write('\n')
2118 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2118 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2119 ui.write(('avg chain length : ') + fmt % avgchainlen)
2119 ui.write(('avg chain length : ') + fmt % avgchainlen)
2120 ui.write(('max chain length : ') + fmt % maxchainlen)
2120 ui.write(('max chain length : ') + fmt % maxchainlen)
2121 ui.write(('max chain reach : ') + fmt % maxchainspan)
2121 ui.write(('max chain reach : ') + fmt % maxchainspan)
2122 ui.write(('compression ratio : ') + fmt % compratio)
2122 ui.write(('compression ratio : ') + fmt % compratio)
2123
2123
2124 if format > 0:
2124 if format > 0:
2125 ui.write('\n')
2125 ui.write('\n')
2126 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2126 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2127 % tuple(datasize))
2127 % tuple(datasize))
2128 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2128 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2129 % tuple(fullsize))
2129 % tuple(fullsize))
2130 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2130 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2131 % tuple(deltasize))
2131 % tuple(deltasize))
2132
2132
2133 if numdeltas > 0:
2133 if numdeltas > 0:
2134 ui.write('\n')
2134 ui.write('\n')
2135 fmt = pcfmtstr(numdeltas)
2135 fmt = pcfmtstr(numdeltas)
2136 fmt2 = pcfmtstr(numdeltas, 4)
2136 fmt2 = pcfmtstr(numdeltas, 4)
2137 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2137 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2138 if numprev > 0:
2138 if numprev > 0:
2139 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2139 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2140 numprev))
2140 numprev))
2141 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2141 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2142 numprev))
2142 numprev))
2143 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2143 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2144 numprev))
2144 numprev))
2145 if gdelta:
2145 if gdelta:
2146 ui.write(('deltas against p1 : ')
2146 ui.write(('deltas against p1 : ')
2147 + fmt % pcfmt(nump1, numdeltas))
2147 + fmt % pcfmt(nump1, numdeltas))
2148 ui.write(('deltas against p2 : ')
2148 ui.write(('deltas against p2 : ')
2149 + fmt % pcfmt(nump2, numdeltas))
2149 + fmt % pcfmt(nump2, numdeltas))
2150 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2150 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2151 numdeltas))
2151 numdeltas))
2152
2152
2153 @command('debugrevspec',
2153 @command('debugrevspec',
2154 [('', 'optimize', None,
2154 [('', 'optimize', None,
2155 _('print parsed tree after optimizing (DEPRECATED)')),
2155 _('print parsed tree after optimizing (DEPRECATED)')),
2156 ('', 'show-revs', True, _('print list of result revisions (default)')),
2156 ('', 'show-revs', True, _('print list of result revisions (default)')),
2157 ('s', 'show-set', None, _('print internal representation of result set')),
2157 ('s', 'show-set', None, _('print internal representation of result set')),
2158 ('p', 'show-stage', [],
2158 ('p', 'show-stage', [],
2159 _('print parsed tree at the given stage'), _('NAME')),
2159 _('print parsed tree at the given stage'), _('NAME')),
2160 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2160 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2161 ('', 'verify-optimized', False, _('verify optimized result')),
2161 ('', 'verify-optimized', False, _('verify optimized result')),
2162 ],
2162 ],
2163 ('REVSPEC'))
2163 ('REVSPEC'))
2164 def debugrevspec(ui, repo, expr, **opts):
2164 def debugrevspec(ui, repo, expr, **opts):
2165 """parse and apply a revision specification
2165 """parse and apply a revision specification
2166
2166
2167 Use -p/--show-stage option to print the parsed tree at the given stages.
2167 Use -p/--show-stage option to print the parsed tree at the given stages.
2168 Use -p all to print the tree at every stage.
2168 Use -p all to print the tree at every stage.
2169
2169
2170 Use --no-show-revs option with -s or -p to print only the set
2170 Use --no-show-revs option with -s or -p to print only the set
2171 representation or the parsed tree respectively.
2171 representation or the parsed tree respectively.
2172
2172
2173 Use --verify-optimized to compare the optimized result with the unoptimized
2173 Use --verify-optimized to compare the optimized result with the unoptimized
2174 one. Returns 1 if the optimized result differs.
2174 one. Returns 1 if the optimized result differs.
2175 """
2175 """
2176 opts = pycompat.byteskwargs(opts)
2176 opts = pycompat.byteskwargs(opts)
2177 aliases = ui.configitems('revsetalias')
2177 aliases = ui.configitems('revsetalias')
2178 stages = [
2178 stages = [
2179 ('parsed', lambda tree: tree),
2179 ('parsed', lambda tree: tree),
2180 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2180 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2181 ui.warn)),
2181 ui.warn)),
2182 ('concatenated', revsetlang.foldconcat),
2182 ('concatenated', revsetlang.foldconcat),
2183 ('analyzed', revsetlang.analyze),
2183 ('analyzed', revsetlang.analyze),
2184 ('optimized', revsetlang.optimize),
2184 ('optimized', revsetlang.optimize),
2185 ]
2185 ]
2186 if opts['no_optimized']:
2186 if opts['no_optimized']:
2187 stages = stages[:-1]
2187 stages = stages[:-1]
2188 if opts['verify_optimized'] and opts['no_optimized']:
2188 if opts['verify_optimized'] and opts['no_optimized']:
2189 raise error.Abort(_('cannot use --verify-optimized with '
2189 raise error.Abort(_('cannot use --verify-optimized with '
2190 '--no-optimized'))
2190 '--no-optimized'))
2191 stagenames = set(n for n, f in stages)
2191 stagenames = set(n for n, f in stages)
2192
2192
2193 showalways = set()
2193 showalways = set()
2194 showchanged = set()
2194 showchanged = set()
2195 if ui.verbose and not opts['show_stage']:
2195 if ui.verbose and not opts['show_stage']:
2196 # show parsed tree by --verbose (deprecated)
2196 # show parsed tree by --verbose (deprecated)
2197 showalways.add('parsed')
2197 showalways.add('parsed')
2198 showchanged.update(['expanded', 'concatenated'])
2198 showchanged.update(['expanded', 'concatenated'])
2199 if opts['optimize']:
2199 if opts['optimize']:
2200 showalways.add('optimized')
2200 showalways.add('optimized')
2201 if opts['show_stage'] and opts['optimize']:
2201 if opts['show_stage'] and opts['optimize']:
2202 raise error.Abort(_('cannot use --optimize with --show-stage'))
2202 raise error.Abort(_('cannot use --optimize with --show-stage'))
2203 if opts['show_stage'] == ['all']:
2203 if opts['show_stage'] == ['all']:
2204 showalways.update(stagenames)
2204 showalways.update(stagenames)
2205 else:
2205 else:
2206 for n in opts['show_stage']:
2206 for n in opts['show_stage']:
2207 if n not in stagenames:
2207 if n not in stagenames:
2208 raise error.Abort(_('invalid stage name: %s') % n)
2208 raise error.Abort(_('invalid stage name: %s') % n)
2209 showalways.update(opts['show_stage'])
2209 showalways.update(opts['show_stage'])
2210
2210
2211 treebystage = {}
2211 treebystage = {}
2212 printedtree = None
2212 printedtree = None
2213 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2213 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2214 for n, f in stages:
2214 for n, f in stages:
2215 treebystage[n] = tree = f(tree)
2215 treebystage[n] = tree = f(tree)
2216 if n in showalways or (n in showchanged and tree != printedtree):
2216 if n in showalways or (n in showchanged and tree != printedtree):
2217 if opts['show_stage'] or n != 'parsed':
2217 if opts['show_stage'] or n != 'parsed':
2218 ui.write(("* %s:\n") % n)
2218 ui.write(("* %s:\n") % n)
2219 ui.write(revsetlang.prettyformat(tree), "\n")
2219 ui.write(revsetlang.prettyformat(tree), "\n")
2220 printedtree = tree
2220 printedtree = tree
2221
2221
2222 if opts['verify_optimized']:
2222 if opts['verify_optimized']:
2223 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2223 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2224 brevs = revset.makematcher(treebystage['optimized'])(repo)
2224 brevs = revset.makematcher(treebystage['optimized'])(repo)
2225 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2225 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2226 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2226 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2227 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2227 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2228 arevs = list(arevs)
2228 arevs = list(arevs)
2229 brevs = list(brevs)
2229 brevs = list(brevs)
2230 if arevs == brevs:
2230 if arevs == brevs:
2231 return 0
2231 return 0
2232 ui.write(('--- analyzed\n'), label='diff.file_a')
2232 ui.write(('--- analyzed\n'), label='diff.file_a')
2233 ui.write(('+++ optimized\n'), label='diff.file_b')
2233 ui.write(('+++ optimized\n'), label='diff.file_b')
2234 sm = difflib.SequenceMatcher(None, arevs, brevs)
2234 sm = difflib.SequenceMatcher(None, arevs, brevs)
2235 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2235 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2236 if tag in ('delete', 'replace'):
2236 if tag in ('delete', 'replace'):
2237 for c in arevs[alo:ahi]:
2237 for c in arevs[alo:ahi]:
2238 ui.write('-%s\n' % c, label='diff.deleted')
2238 ui.write('-%s\n' % c, label='diff.deleted')
2239 if tag in ('insert', 'replace'):
2239 if tag in ('insert', 'replace'):
2240 for c in brevs[blo:bhi]:
2240 for c in brevs[blo:bhi]:
2241 ui.write('+%s\n' % c, label='diff.inserted')
2241 ui.write('+%s\n' % c, label='diff.inserted')
2242 if tag == 'equal':
2242 if tag == 'equal':
2243 for c in arevs[alo:ahi]:
2243 for c in arevs[alo:ahi]:
2244 ui.write(' %s\n' % c)
2244 ui.write(' %s\n' % c)
2245 return 1
2245 return 1
2246
2246
2247 func = revset.makematcher(tree)
2247 func = revset.makematcher(tree)
2248 revs = func(repo)
2248 revs = func(repo)
2249 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2249 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2250 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2250 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2251 if not opts['show_revs']:
2251 if not opts['show_revs']:
2252 return
2252 return
2253 for c in revs:
2253 for c in revs:
2254 ui.write("%d\n" % c)
2254 ui.write("%d\n" % c)
2255
2255
2256 @command('debugserve', [
2256 @command('debugserve', [
2257 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2257 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2258 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2258 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2259 ('', 'logiofile', '', _('file to log server I/O to')),
2259 ('', 'logiofile', '', _('file to log server I/O to')),
2260 ], '')
2260 ], '')
2261 def debugserve(ui, repo, **opts):
2261 def debugserve(ui, repo, **opts):
2262 """run a server with advanced settings
2262 """run a server with advanced settings
2263
2263
2264 This command is similar to :hg:`serve`. It exists partially as a
2264 This command is similar to :hg:`serve`. It exists partially as a
2265 workaround for the fact that ``hg serve --stdio`` must have specific
2265 workaround for the fact that ``hg serve --stdio`` must have specific
2266 arguments for security reasons.
2266 arguments for security reasons.
2267 """
2267 """
2268 opts = pycompat.byteskwargs(opts)
2268 opts = pycompat.byteskwargs(opts)
2269
2269
2270 if not opts['sshstdio']:
2270 if not opts['sshstdio']:
2271 raise error.Abort(_('only --sshstdio is currently supported'))
2271 raise error.Abort(_('only --sshstdio is currently supported'))
2272
2272
2273 logfh = None
2273 logfh = None
2274
2274
2275 if opts['logiofd'] and opts['logiofile']:
2275 if opts['logiofd'] and opts['logiofile']:
2276 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2276 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2277
2277
2278 if opts['logiofd']:
2278 if opts['logiofd']:
2279 # Line buffered because output is line based.
2279 # Line buffered because output is line based.
2280 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2280 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2281 elif opts['logiofile']:
2281 elif opts['logiofile']:
2282 logfh = open(opts['logiofile'], 'ab', 1)
2282 logfh = open(opts['logiofile'], 'ab', 1)
2283
2283
2284 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2284 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2285 s.serve_forever()
2285 s.serve_forever()
2286
2286
2287 @command('debugsetparents', [], _('REV1 [REV2]'))
2287 @command('debugsetparents', [], _('REV1 [REV2]'))
2288 def debugsetparents(ui, repo, rev1, rev2=None):
2288 def debugsetparents(ui, repo, rev1, rev2=None):
2289 """manually set the parents of the current working directory
2289 """manually set the parents of the current working directory
2290
2290
2291 This is useful for writing repository conversion tools, but should
2291 This is useful for writing repository conversion tools, but should
2292 be used with care. For example, neither the working directory nor the
2292 be used with care. For example, neither the working directory nor the
2293 dirstate is updated, so file status may be incorrect after running this
2293 dirstate is updated, so file status may be incorrect after running this
2294 command.
2294 command.
2295
2295
2296 Returns 0 on success.
2296 Returns 0 on success.
2297 """
2297 """
2298
2298
2299 node1 = scmutil.revsingle(repo, rev1).node()
2299 node1 = scmutil.revsingle(repo, rev1).node()
2300 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2300 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2301
2301
2302 with repo.wlock():
2302 with repo.wlock():
2303 repo.setparents(node1, node2)
2303 repo.setparents(node1, node2)
2304
2304
2305 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2305 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2306 def debugssl(ui, repo, source=None, **opts):
2306 def debugssl(ui, repo, source=None, **opts):
2307 '''test a secure connection to a server
2307 '''test a secure connection to a server
2308
2308
2309 This builds the certificate chain for the server on Windows, installing the
2309 This builds the certificate chain for the server on Windows, installing the
2310 missing intermediates and trusted root via Windows Update if necessary. It
2310 missing intermediates and trusted root via Windows Update if necessary. It
2311 does nothing on other platforms.
2311 does nothing on other platforms.
2312
2312
2313 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2313 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2314 that server is used. See :hg:`help urls` for more information.
2314 that server is used. See :hg:`help urls` for more information.
2315
2315
2316 If the update succeeds, retry the original operation. Otherwise, the cause
2316 If the update succeeds, retry the original operation. Otherwise, the cause
2317 of the SSL error is likely another issue.
2317 of the SSL error is likely another issue.
2318 '''
2318 '''
2319 if not pycompat.iswindows:
2319 if not pycompat.iswindows:
2320 raise error.Abort(_('certificate chain building is only possible on '
2320 raise error.Abort(_('certificate chain building is only possible on '
2321 'Windows'))
2321 'Windows'))
2322
2322
2323 if not source:
2323 if not source:
2324 if not repo:
2324 if not repo:
2325 raise error.Abort(_("there is no Mercurial repository here, and no "
2325 raise error.Abort(_("there is no Mercurial repository here, and no "
2326 "server specified"))
2326 "server specified"))
2327 source = "default"
2327 source = "default"
2328
2328
2329 source, branches = hg.parseurl(ui.expandpath(source))
2329 source, branches = hg.parseurl(ui.expandpath(source))
2330 url = util.url(source)
2330 url = util.url(source)
2331 addr = None
2331 addr = None
2332
2332
2333 defaultport = {'https': 443, 'ssh': 22}
2333 defaultport = {'https': 443, 'ssh': 22}
2334 if url.scheme in defaultport:
2334 if url.scheme in defaultport:
2335 try:
2335 try:
2336 addr = (url.host, int(url.port or defaultport[url.scheme]))
2336 addr = (url.host, int(url.port or defaultport[url.scheme]))
2337 except ValueError:
2337 except ValueError:
2338 raise error.Abort(_("malformed port number in URL"))
2338 raise error.Abort(_("malformed port number in URL"))
2339 else:
2339 else:
2340 raise error.Abort(_("only https and ssh connections are supported"))
2340 raise error.Abort(_("only https and ssh connections are supported"))
2341
2341
2342 from . import win32
2342 from . import win32
2343
2343
2344 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2344 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2345 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2345 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2346
2346
2347 try:
2347 try:
2348 s.connect(addr)
2348 s.connect(addr)
2349 cert = s.getpeercert(True)
2349 cert = s.getpeercert(True)
2350
2350
2351 ui.status(_('checking the certificate chain for %s\n') % url.host)
2351 ui.status(_('checking the certificate chain for %s\n') % url.host)
2352
2352
2353 complete = win32.checkcertificatechain(cert, build=False)
2353 complete = win32.checkcertificatechain(cert, build=False)
2354
2354
2355 if not complete:
2355 if not complete:
2356 ui.status(_('certificate chain is incomplete, updating... '))
2356 ui.status(_('certificate chain is incomplete, updating... '))
2357
2357
2358 if not win32.checkcertificatechain(cert):
2358 if not win32.checkcertificatechain(cert):
2359 ui.status(_('failed.\n'))
2359 ui.status(_('failed.\n'))
2360 else:
2360 else:
2361 ui.status(_('done.\n'))
2361 ui.status(_('done.\n'))
2362 else:
2362 else:
2363 ui.status(_('full certificate chain is available\n'))
2363 ui.status(_('full certificate chain is available\n'))
2364 finally:
2364 finally:
2365 s.close()
2365 s.close()
2366
2366
2367 @command('debugsub',
2367 @command('debugsub',
2368 [('r', 'rev', '',
2368 [('r', 'rev', '',
2369 _('revision to check'), _('REV'))],
2369 _('revision to check'), _('REV'))],
2370 _('[-r REV] [REV]'))
2370 _('[-r REV] [REV]'))
2371 def debugsub(ui, repo, rev=None):
2371 def debugsub(ui, repo, rev=None):
2372 ctx = scmutil.revsingle(repo, rev, None)
2372 ctx = scmutil.revsingle(repo, rev, None)
2373 for k, v in sorted(ctx.substate.items()):
2373 for k, v in sorted(ctx.substate.items()):
2374 ui.write(('path %s\n') % k)
2374 ui.write(('path %s\n') % k)
2375 ui.write((' source %s\n') % v[0])
2375 ui.write((' source %s\n') % v[0])
2376 ui.write((' revision %s\n') % v[1])
2376 ui.write((' revision %s\n') % v[1])
2377
2377
2378 @command('debugsuccessorssets',
2378 @command('debugsuccessorssets',
2379 [('', 'closest', False, _('return closest successors sets only'))],
2379 [('', 'closest', False, _('return closest successors sets only'))],
2380 _('[REV]'))
2380 _('[REV]'))
2381 def debugsuccessorssets(ui, repo, *revs, **opts):
2381 def debugsuccessorssets(ui, repo, *revs, **opts):
2382 """show set of successors for revision
2382 """show set of successors for revision
2383
2383
2384 A successors set of changeset A is a consistent group of revisions that
2384 A successors set of changeset A is a consistent group of revisions that
2385 succeed A. It contains non-obsolete changesets only unless the closest
2385 succeed A. It contains non-obsolete changesets only unless the closest
2386 successors sets are requested (--closest).
2386 successors sets are requested (--closest).
2387
2387
2388 In most cases a changeset A has a single successors set containing a single
2388 In most cases a changeset A has a single successors set containing a single
2389 successor (changeset A replaced by A').
2389 successor (changeset A replaced by A').
2390
2390
2391 A changeset that is made obsolete with no successors is called "pruned".
2391 A changeset that is made obsolete with no successors is called "pruned".
2392 Such changesets have no successors sets at all.
2392 Such changesets have no successors sets at all.
2393
2393
2394 A changeset that has been "split" will have a successors set containing
2394 A changeset that has been "split" will have a successors set containing
2395 more than one successor.
2395 more than one successor.
2396
2396
2397 A changeset that has been rewritten in multiple different ways is called
2397 A changeset that has been rewritten in multiple different ways is called
2398 "divergent". Such changesets have multiple successor sets (each of which
2398 "divergent". Such changesets have multiple successor sets (each of which
2399 may also be split, i.e. have multiple successors).
2399 may also be split, i.e. have multiple successors).
2400
2400
2401 Results are displayed as follows::
2401 Results are displayed as follows::
2402
2402
2403 <rev1>
2403 <rev1>
2404 <successors-1A>
2404 <successors-1A>
2405 <rev2>
2405 <rev2>
2406 <successors-2A>
2406 <successors-2A>
2407 <successors-2B1> <successors-2B2> <successors-2B3>
2407 <successors-2B1> <successors-2B2> <successors-2B3>
2408
2408
2409 Here rev2 has two possible (i.e. divergent) successors sets. The first
2409 Here rev2 has two possible (i.e. divergent) successors sets. The first
2410 holds one element, whereas the second holds three (i.e. the changeset has
2410 holds one element, whereas the second holds three (i.e. the changeset has
2411 been split).
2411 been split).
2412 """
2412 """
2413 # passed to successorssets caching computation from one call to another
2413 # passed to successorssets caching computation from one call to another
2414 cache = {}
2414 cache = {}
2415 ctx2str = bytes
2415 ctx2str = bytes
2416 node2str = short
2416 node2str = short
2417 for rev in scmutil.revrange(repo, revs):
2417 for rev in scmutil.revrange(repo, revs):
2418 ctx = repo[rev]
2418 ctx = repo[rev]
2419 ui.write('%s\n'% ctx2str(ctx))
2419 ui.write('%s\n'% ctx2str(ctx))
2420 for succsset in obsutil.successorssets(repo, ctx.node(),
2420 for succsset in obsutil.successorssets(repo, ctx.node(),
2421 closest=opts[r'closest'],
2421 closest=opts[r'closest'],
2422 cache=cache):
2422 cache=cache):
2423 if succsset:
2423 if succsset:
2424 ui.write(' ')
2424 ui.write(' ')
2425 ui.write(node2str(succsset[0]))
2425 ui.write(node2str(succsset[0]))
2426 for node in succsset[1:]:
2426 for node in succsset[1:]:
2427 ui.write(' ')
2427 ui.write(' ')
2428 ui.write(node2str(node))
2428 ui.write(node2str(node))
2429 ui.write('\n')
2429 ui.write('\n')
2430
2430
2431 @command('debugtemplate',
2431 @command('debugtemplate',
2432 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2432 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2433 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2433 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2434 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2434 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2435 optionalrepo=True)
2435 optionalrepo=True)
2436 def debugtemplate(ui, repo, tmpl, **opts):
2436 def debugtemplate(ui, repo, tmpl, **opts):
2437 """parse and apply a template
2437 """parse and apply a template
2438
2438
2439 If -r/--rev is given, the template is processed as a log template and
2439 If -r/--rev is given, the template is processed as a log template and
2440 applied to the given changesets. Otherwise, it is processed as a generic
2440 applied to the given changesets. Otherwise, it is processed as a generic
2441 template.
2441 template.
2442
2442
2443 Use --verbose to print the parsed tree.
2443 Use --verbose to print the parsed tree.
2444 """
2444 """
2445 revs = None
2445 revs = None
2446 if opts[r'rev']:
2446 if opts[r'rev']:
2447 if repo is None:
2447 if repo is None:
2448 raise error.RepoError(_('there is no Mercurial repository here '
2448 raise error.RepoError(_('there is no Mercurial repository here '
2449 '(.hg not found)'))
2449 '(.hg not found)'))
2450 revs = scmutil.revrange(repo, opts[r'rev'])
2450 revs = scmutil.revrange(repo, opts[r'rev'])
2451
2451
2452 props = {}
2452 props = {}
2453 for d in opts[r'define']:
2453 for d in opts[r'define']:
2454 try:
2454 try:
2455 k, v = (e.strip() for e in d.split('=', 1))
2455 k, v = (e.strip() for e in d.split('=', 1))
2456 if not k or k == 'ui':
2456 if not k or k == 'ui':
2457 raise ValueError
2457 raise ValueError
2458 props[k] = v
2458 props[k] = v
2459 except ValueError:
2459 except ValueError:
2460 raise error.Abort(_('malformed keyword definition: %s') % d)
2460 raise error.Abort(_('malformed keyword definition: %s') % d)
2461
2461
2462 if ui.verbose:
2462 if ui.verbose:
2463 aliases = ui.configitems('templatealias')
2463 aliases = ui.configitems('templatealias')
2464 tree = templater.parse(tmpl)
2464 tree = templater.parse(tmpl)
2465 ui.note(templater.prettyformat(tree), '\n')
2465 ui.note(templater.prettyformat(tree), '\n')
2466 newtree = templater.expandaliases(tree, aliases)
2466 newtree = templater.expandaliases(tree, aliases)
2467 if newtree != tree:
2467 if newtree != tree:
2468 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2468 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2469
2469
2470 if revs is None:
2470 if revs is None:
2471 tres = formatter.templateresources(ui, repo)
2471 tres = formatter.templateresources(ui, repo)
2472 t = formatter.maketemplater(ui, tmpl, resources=tres)
2472 t = formatter.maketemplater(ui, tmpl, resources=tres)
2473 ui.write(t.renderdefault(props))
2473 ui.write(t.renderdefault(props))
2474 else:
2474 else:
2475 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2475 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2476 for r in revs:
2476 for r in revs:
2477 displayer.show(repo[r], **pycompat.strkwargs(props))
2477 displayer.show(repo[r], **pycompat.strkwargs(props))
2478 displayer.close()
2478 displayer.close()
2479
2479
2480 @command('debuguigetpass', [
2480 @command('debuguigetpass', [
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2482 ], _('[-p TEXT]'), norepo=True)
2482 ], _('[-p TEXT]'), norepo=True)
2483 def debuguigetpass(ui, prompt=''):
2483 def debuguigetpass(ui, prompt=''):
2484 """show prompt to type password"""
2484 """show prompt to type password"""
2485 r = ui.getpass(prompt)
2485 r = ui.getpass(prompt)
2486 ui.write(('response: %s\n') % r)
2486 ui.write(('response: %s\n') % r)
2487
2487
2488 @command('debuguiprompt', [
2488 @command('debuguiprompt', [
2489 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2489 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2490 ], _('[-p TEXT]'), norepo=True)
2490 ], _('[-p TEXT]'), norepo=True)
2491 def debuguiprompt(ui, prompt=''):
2491 def debuguiprompt(ui, prompt=''):
2492 """show plain prompt"""
2492 """show plain prompt"""
2493 r = ui.prompt(prompt)
2493 r = ui.prompt(prompt)
2494 ui.write(('response: %s\n') % r)
2494 ui.write(('response: %s\n') % r)
2495
2495
2496 @command('debugupdatecaches', [])
2496 @command('debugupdatecaches', [])
2497 def debugupdatecaches(ui, repo, *pats, **opts):
2497 def debugupdatecaches(ui, repo, *pats, **opts):
2498 """warm all known caches in the repository"""
2498 """warm all known caches in the repository"""
2499 with repo.wlock(), repo.lock():
2499 with repo.wlock(), repo.lock():
2500 repo.updatecaches(full=True)
2500 repo.updatecaches(full=True)
2501
2501
2502 @command('debugupgraderepo', [
2502 @command('debugupgraderepo', [
2503 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2503 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2504 ('', 'run', False, _('performs an upgrade')),
2504 ('', 'run', False, _('performs an upgrade')),
2505 ])
2505 ])
2506 def debugupgraderepo(ui, repo, run=False, optimize=None):
2506 def debugupgraderepo(ui, repo, run=False, optimize=None):
2507 """upgrade a repository to use different features
2507 """upgrade a repository to use different features
2508
2508
2509 If no arguments are specified, the repository is evaluated for upgrade
2509 If no arguments are specified, the repository is evaluated for upgrade
2510 and a list of problems and potential optimizations is printed.
2510 and a list of problems and potential optimizations is printed.
2511
2511
2512 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2512 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2513 can be influenced via additional arguments. More details will be provided
2513 can be influenced via additional arguments. More details will be provided
2514 by the command output when run without ``--run``.
2514 by the command output when run without ``--run``.
2515
2515
2516 During the upgrade, the repository will be locked and no writes will be
2516 During the upgrade, the repository will be locked and no writes will be
2517 allowed.
2517 allowed.
2518
2518
2519 At the end of the upgrade, the repository may not be readable while new
2519 At the end of the upgrade, the repository may not be readable while new
2520 repository data is swapped in. This window will be as long as it takes to
2520 repository data is swapped in. This window will be as long as it takes to
2521 rename some directories inside the ``.hg`` directory. On most machines, this
2521 rename some directories inside the ``.hg`` directory. On most machines, this
2522 should complete almost instantaneously and the chances of a consumer being
2522 should complete almost instantaneously and the chances of a consumer being
2523 unable to access the repository should be low.
2523 unable to access the repository should be low.
2524 """
2524 """
2525 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2525 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2526
2526
2527 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2527 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2528 inferrepo=True)
2528 inferrepo=True)
2529 def debugwalk(ui, repo, *pats, **opts):
2529 def debugwalk(ui, repo, *pats, **opts):
2530 """show how files match on given patterns"""
2530 """show how files match on given patterns"""
2531 opts = pycompat.byteskwargs(opts)
2531 opts = pycompat.byteskwargs(opts)
2532 m = scmutil.match(repo[None], pats, opts)
2532 m = scmutil.match(repo[None], pats, opts)
2533 ui.write(('matcher: %r\n' % m))
2533 ui.write(('matcher: %r\n' % m))
2534 items = list(repo[None].walk(m))
2534 items = list(repo[None].walk(m))
2535 if not items:
2535 if not items:
2536 return
2536 return
2537 f = lambda fn: fn
2537 f = lambda fn: fn
2538 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2538 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2539 f = lambda fn: util.normpath(fn)
2539 f = lambda fn: util.normpath(fn)
2540 fmt = 'f %%-%ds %%-%ds %%s' % (
2540 fmt = 'f %%-%ds %%-%ds %%s' % (
2541 max([len(abs) for abs in items]),
2541 max([len(abs) for abs in items]),
2542 max([len(m.rel(abs)) for abs in items]))
2542 max([len(m.rel(abs)) for abs in items]))
2543 for abs in items:
2543 for abs in items:
2544 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2544 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2545 ui.write("%s\n" % line.rstrip())
2545 ui.write("%s\n" % line.rstrip())
2546
2546
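The ``%%-%ds`` dance in debugwalk above first builds a format string whose column
widths equal the longest absolute and relative paths, and only then formats each
row. A minimal standalone sketch with made-up paths (the file names below are
illustrative, not from any repository)::

    absfiles = ['dir/a.txt', 'b.py']
    relfiles = ['a.txt', '../b.py']

    # The outer % fills in the widths, producing a second format string.
    fmt = 'f %%-%ds %%-%ds %%s' % (max(len(a) for a in absfiles),
                                   max(len(r) for r in relfiles))
    print(fmt)  # f %-9s %-7s %s
    for a, r in zip(absfiles, relfiles):
        print(fmt % (a, r, 'exact' if a == 'dir/a.txt' else ''))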
2547 @command('debugwhyunstable', [], _('REV'))
2547 @command('debugwhyunstable', [], _('REV'))
2548 def debugwhyunstable(ui, repo, rev):
2548 def debugwhyunstable(ui, repo, rev):
2549 """explain instabilities of a changeset"""
2549 """explain instabilities of a changeset"""
2550 for entry in obsutil.whyunstable(repo, repo[rev]):
2550 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2551 dnodes = ''
2551 dnodes = ''
2552 if entry.get('divergentnodes'):
2552 if entry.get('divergentnodes'):
2553 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2553 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2554 for ctx in entry['divergentnodes']) + ' '
2554 for ctx in entry['divergentnodes']) + ' '
2555 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2555 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2556 entry['reason'], entry['node']))
2556 entry['reason'], entry['node']))
2557
2557
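The one functional change in the hunk above (file line 2550) replaces ``repo[rev]``
with ``scmutil.revsingle(repo, rev)``, so the REV argument may now be any revset
expression that resolves to a single changeset, not just a direct identifier. A toy,
self-contained sketch of that distinction; the class and helper below are
illustrative stand-ins, not Mercurial's API::

    class ToyRepo:
        """Stand-in repository: indexing only understands direct identifiers."""
        def __init__(self):
            self._ctxs = {'0': 'ctx-0', '1': 'ctx-1', 'tip': 'ctx-1'}

        def __getitem__(self, rev):
            return self._ctxs[rev]          # KeyError for 'all()' and friends

        def evalrevset(self, expr):
            # Stand-in for revset evaluation.
            if expr == 'all()':
                return ['0', '1']
            return [expr] if expr in self._ctxs else []

    def revsingle(repo, expr):
        """Resolve an expression to exactly one context, revsingle-style."""
        revs = repo.evalrevset(expr)
        if not revs:
            raise LookupError('empty revision set: %s' % expr)
        return repo[revs[-1]]

    repo = ToyRepo()
    print(repo['tip'])               # a direct identifier works either way
    print(revsingle(repo, 'all()'))  # a revset only resolves via the helper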
2558 @command('debugwireargs',
2558 @command('debugwireargs',
2559 [('', 'three', '', 'three'),
2559 [('', 'three', '', 'three'),
2560 ('', 'four', '', 'four'),
2560 ('', 'four', '', 'four'),
2561 ('', 'five', '', 'five'),
2561 ('', 'five', '', 'five'),
2562 ] + cmdutil.remoteopts,
2562 ] + cmdutil.remoteopts,
2563 _('REPO [OPTIONS]... [ONE [TWO]]'),
2563 _('REPO [OPTIONS]... [ONE [TWO]]'),
2564 norepo=True)
2564 norepo=True)
2565 def debugwireargs(ui, repopath, *vals, **opts):
2565 def debugwireargs(ui, repopath, *vals, **opts):
2566 opts = pycompat.byteskwargs(opts)
2566 opts = pycompat.byteskwargs(opts)
2567 repo = hg.peer(ui, opts, repopath)
2567 repo = hg.peer(ui, opts, repopath)
2568 for opt in cmdutil.remoteopts:
2568 for opt in cmdutil.remoteopts:
2569 del opts[opt[1]]
2569 del opts[opt[1]]
2570 args = {}
2570 args = {}
2571 for k, v in opts.iteritems():
2571 for k, v in opts.iteritems():
2572 if v:
2572 if v:
2573 args[k] = v
2573 args[k] = v
2574 args = pycompat.strkwargs(args)
2574 args = pycompat.strkwargs(args)
2575 # run twice to check that we don't mess up the stream for the next command
2575 # run twice to check that we don't mess up the stream for the next command
2576 res1 = repo.debugwireargs(*vals, **args)
2576 res1 = repo.debugwireargs(*vals, **args)
2577 res2 = repo.debugwireargs(*vals, **args)
2577 res2 = repo.debugwireargs(*vals, **args)
2578 ui.write("%s\n" % res1)
2578 ui.write("%s\n" % res1)
2579 if res1 != res2:
2579 if res1 != res2:
2580 ui.warn("%s\n" % res2)
2580 ui.warn("%s\n" % res2)
2581
2581
2582 def _parsewirelangblocks(fh):
2582 def _parsewirelangblocks(fh):
2583 activeaction = None
2583 activeaction = None
2584 blocklines = []
2584 blocklines = []
2585
2585
2586 for line in fh:
2586 for line in fh:
2587 line = line.rstrip()
2587 line = line.rstrip()
2588 if not line:
2588 if not line:
2589 continue
2589 continue
2590
2590
2591 if line.startswith(b'#'):
2591 if line.startswith(b'#'):
2592 continue
2592 continue
2593
2593
2594 if not line.startswith(' '):
2594 if not line.startswith(' '):
2595 # New block. Flush previous one.
2595 # New block. Flush previous one.
2596 if activeaction:
2596 if activeaction:
2597 yield activeaction, blocklines
2597 yield activeaction, blocklines
2598
2598
2599 activeaction = line
2599 activeaction = line
2600 blocklines = []
2600 blocklines = []
2601 continue
2601 continue
2602
2602
2603 # Else we start with an indent.
2603 # Else we start with an indent.
2604
2604
2605 if not activeaction:
2605 if not activeaction:
2606 raise error.Abort(_('indented line outside of block'))
2606 raise error.Abort(_('indented line outside of block'))
2607
2607
2608 blocklines.append(line)
2608 blocklines.append(line)
2609
2609
2610 # Flush last block.
2610 # Flush last block.
2611 if activeaction:
2611 if activeaction:
2612 yield activeaction, blocklines
2612 yield activeaction, blocklines
2613
2613
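For readers skimming the parser above: a non-indented line opens a new action block,
indented lines become its payload, and blank or ``#`` lines are skipped. A standalone
Python 3 restatement (str instead of bytes, ValueError instead of error.Abort; a
sketch, not the function itself)::

    import io

    def parseblocks(fh):
        action, payload = None, []
        for line in fh:
            line = line.rstrip()
            if not line or line.startswith('#'):
                continue
            if not line.startswith(' '):
                if action is not None:      # flush the previous block
                    yield action, payload
                action, payload = line, []
                continue
            if action is None:
                raise ValueError('indented line outside of block')
            payload.append(line)
        if action is not None:              # flush the last block
            yield action, payload

    sample = io.StringIO('command listkeys\n    namespace bookmarks\nreadavailable\n')
    print(list(parseblocks(sample)))
    # [('command listkeys', ['    namespace bookmarks']), ('readavailable', [])]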
2614 @command('debugwireproto',
2614 @command('debugwireproto',
2615 [
2615 [
2616 ('', 'localssh', False, _('start an SSH server for this repo')),
2616 ('', 'localssh', False, _('start an SSH server for this repo')),
2617 ('', 'peer', '', _('construct a specific version of the peer')),
2617 ('', 'peer', '', _('construct a specific version of the peer')),
2618 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2618 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2619 ] + cmdutil.remoteopts,
2619 ] + cmdutil.remoteopts,
2620 _('[PATH]'),
2620 _('[PATH]'),
2621 optionalrepo=True)
2621 optionalrepo=True)
2622 def debugwireproto(ui, repo, path=None, **opts):
2622 def debugwireproto(ui, repo, path=None, **opts):
2623 """send wire protocol commands to a server
2623 """send wire protocol commands to a server
2624
2624
2625 This command can be used to issue wire protocol commands to remote
2625 This command can be used to issue wire protocol commands to remote
2626 peers and to debug the raw data being exchanged.
2626 peers and to debug the raw data being exchanged.
2627
2627
2628 ``--localssh`` will start an SSH server against the current repository
2628 ``--localssh`` will start an SSH server against the current repository
2629 and connect to that. By default, the connection will perform a handshake
2629 and connect to that. By default, the connection will perform a handshake
2630 and establish an appropriate peer instance.
2630 and establish an appropriate peer instance.
2631
2631
2632 ``--peer`` can be used to bypass the handshake protocol and construct a
2632 ``--peer`` can be used to bypass the handshake protocol and construct a
2633 peer instance using the specified class type. Valid values are ``raw``,
2633 peer instance using the specified class type. Valid values are ``raw``,
2634 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2634 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2635 payloads and don't support higher-level command actions.
2635 payloads and don't support higher-level command actions.
2636
2636
2637 ``--noreadstderr`` can be used to disable automatic reading from stderr
2637 ``--noreadstderr`` can be used to disable automatic reading from stderr
2638 of the peer (for SSH connections only). Disabling automatic reading of
2638 of the peer (for SSH connections only). Disabling automatic reading of
2639 stderr is useful for making output more deterministic.
2639 stderr is useful for making output more deterministic.
2640
2640
2641 Commands are issued via a mini language which is specified via stdin.
2641 Commands are issued via a mini language which is specified via stdin.
2642 The language consists of individual actions to perform. An action is
2642 The language consists of individual actions to perform. An action is
2643 defined by a block. A block is defined as a line with no leading
2643 defined by a block. A block is defined as a line with no leading
2644 space followed by 0 or more lines with leading space. A block is
2644 space followed by 0 or more lines with leading space. A block is
2645 effectively a high-level command with additional metadata.
2645 effectively a high-level command with additional metadata.
2646
2646
2647 Lines beginning with ``#`` are ignored.
2647 Lines beginning with ``#`` are ignored.
2648
2648
2649 The following sections denote available actions.
2649 The following sections denote available actions.
2650
2650
2651 raw
2651 raw
2652 ---
2652 ---
2653
2653
2654 Send raw data to the server.
2654 Send raw data to the server.
2655
2655
2656 The block payload contains the raw data to send as one atomic send
2656 The block payload contains the raw data to send as one atomic send
2657 operation. The data may not actually be delivered in a single system
2657 operation. The data may not actually be delivered in a single system
2658 call: it depends on the abilities of the transport being used.
2658 call: it depends on the abilities of the transport being used.
2659
2659
2660 Each line in the block is de-indented and concatenated. Then, that
2660 Each line in the block is de-indented and concatenated. Then, that
2661 value is evaluated as a Python b'' literal. This allows the use of
2661 value is evaluated as a Python b'' literal. This allows the use of
2662 backslash escaping, etc.
2662 backslash escaping, etc.
2663
2663
2664 raw+
2664 raw+
2665 ----
2665 ----
2666
2666
2667 Behaves like ``raw`` except flushes output afterwards.
2667 Behaves like ``raw`` except flushes output afterwards.
2668
2668
2669 command <X>
2669 command <X>
2670 -----------
2670 -----------
2671
2671
2672 Send a request to run a named command, whose name follows the ``command``
2672 Send a request to run a named command, whose name follows the ``command``
2673 string.
2673 string.
2674
2674
2675 Arguments to the command are defined as lines in this block. The format of
2675 Arguments to the command are defined as lines in this block. The format of
2676 each line is ``<key> <value>``. e.g.::
2676 each line is ``<key> <value>``. e.g.::
2677
2677
2678 command listkeys
2678 command listkeys
2679 namespace bookmarks
2679 namespace bookmarks
2680
2680
2681 Values are interpreted as Python b'' literals. This allows encoding
2681 Values are interpreted as Python b'' literals. This allows encoding
2682 special byte sequences via backslash escaping.
2682 special byte sequences via backslash escaping.
2683
2683
2684 The following arguments have special meaning:
2684 The following arguments have special meaning:
2685
2685
2686 ``PUSHFILE``
2686 ``PUSHFILE``
2687 When defined, the *push* mechanism of the peer will be used instead
2687 When defined, the *push* mechanism of the peer will be used instead
2688 of the static request-response mechanism and the content of the
2688 of the static request-response mechanism and the content of the
2689 file specified in the value of this argument will be sent as the
2689 file specified in the value of this argument will be sent as the
2690 command payload.
2690 command payload.
2691
2691
2692 This can be used to submit a local bundle file to the remote.
2692 This can be used to submit a local bundle file to the remote.
2693
2693
2694 batchbegin
2694 batchbegin
2695 ----------
2695 ----------
2696
2696
2697 Instruct the peer to begin a batched send.
2697 Instruct the peer to begin a batched send.
2698
2698
2699 All ``command`` blocks are queued for execution until the next
2699 All ``command`` blocks are queued for execution until the next
2700 ``batchsubmit`` block.
2700 ``batchsubmit`` block.
2701
2701
2702 batchsubmit
2702 batchsubmit
2703 -----------
2703 -----------
2704
2704
2705 Submit previously queued ``command`` blocks as a batch request.
2705 Submit previously queued ``command`` blocks as a batch request.
2706
2706
2707 This action MUST be paired with a ``batchbegin`` action.
2707 This action MUST be paired with a ``batchbegin`` action.
2708
2708
2709 httprequest <method> <path>
2709 httprequest <method> <path>
2710 ---------------------------
2710 ---------------------------
2711
2711
2712 (HTTP peer only)
2712 (HTTP peer only)
2713
2713
2714 Send an HTTP request to the peer.
2714 Send an HTTP request to the peer.
2715
2715
2716 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2716 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2717
2717
2718 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2718 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2719 headers to add to the request. e.g. ``Accept: foo``.
2719 headers to add to the request. e.g. ``Accept: foo``.
2720
2720
2721 The following arguments are special:
2721 The following arguments are special:
2722
2722
2723 ``BODYFILE``
2723 ``BODYFILE``
2724 The content of the file defined as the value to this argument will be
2724 The content of the file defined as the value to this argument will be
2725 transferred verbatim as the HTTP request body.
2725 transferred verbatim as the HTTP request body.
2726
2726
2727 ``frame <type> <flags> <payload>``
2727 ``frame <type> <flags> <payload>``
2728 Send a unified protocol frame as part of the request body.
2728 Send a unified protocol frame as part of the request body.
2729
2729
2730 All frames will be collected and sent as the body of the HTTP
2730 All frames will be collected and sent as the body of the HTTP
2731 request.
2731 request.
2732
2732
2733 close
2733 close
2734 -----
2734 -----
2735
2735
2736 Close the connection to the server.
2736 Close the connection to the server.
2737
2737
2738 flush
2738 flush
2739 -----
2739 -----
2740
2740
2741 Flush data written to the server.
2741 Flush data written to the server.
2742
2742
2743 readavailable
2743 readavailable
2744 -------------
2744 -------------
2745
2745
2746 Close the write end of the connection and read all available data from
2746 Close the write end of the connection and read all available data from
2747 the server.
2747 the server.
2748
2748
2749 If the connection to the server encompasses multiple pipes, we poll both
2749 If the connection to the server encompasses multiple pipes, we poll both
2750 pipes and read available data.
2750 pipes and read available data.
2751
2751
2752 readline
2752 readline
2753 --------
2753 --------
2754
2754
2755 Read a line of output from the server. If there are multiple output
2755 Read a line of output from the server. If there are multiple output
2756 pipes, reads only the main pipe.
2756 pipes, reads only the main pipe.
2757
2757
2758 ereadline
2758 ereadline
2759 ---------
2759 ---------
2760
2760
2761 Like ``readline``, but read from the stderr pipe, if available.
2761 Like ``readline``, but read from the stderr pipe, if available.
2762
2762
2763 read <X>
2763 read <X>
2764 --------
2764 --------
2765
2765
2766 ``read()`` ``<X>`` bytes from the server's main output pipe.
2766 ``read()`` ``<X>`` bytes from the server's main output pipe.
2767
2767
2768 eread <X>
2768 eread <X>
2769 ---------
2769 ---------
2770
2770
2771 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
2771 ``read()`` ``<X>`` bytes from the server's stderr pipe, if available.
2772
2772
2773 Specifying Unified Frame-Based Protocol Frames
2773 Specifying Unified Frame-Based Protocol Frames
2774 ----------------------------------------------
2774 ----------------------------------------------
2775
2775
2776 It is possible to emit *Unified Frame-Based Protocol* frames by using
2776 It is possible to emit *Unified Frame-Based Protocol* frames by using
2777 special syntax.
2777 special syntax.
2778
2778
2779 A frame is composed of a type, flags, and a payload. These can be parsed
2779 A frame is composed of a type, flags, and a payload. These can be parsed
2780 from a string of the form:
2780 from a string of the form:
2781
2781
2782 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2782 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2783
2783
2784 ``request-id`` and ``stream-id`` are integers defining the request and
2784 ``request-id`` and ``stream-id`` are integers defining the request and
2785 stream identifiers.
2785 stream identifiers.
2786
2786
2787 ``type`` can be an integer value for the frame type or the string name
2787 ``type`` can be an integer value for the frame type or the string name
2788 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2788 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2789 ``command-name``.
2789 ``command-name``.
2790
2790
2791 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2791 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2792 components. Each component (and there can be just one) can be an integer
2792 components. Each component (and there can be just one) can be an integer
2793 or a flag name for stream flags or frame flags, respectively. Values are
2793 or a flag name for stream flags or frame flags, respectively. Values are
2794 resolved to integers and then bitwise OR'd together.
2794 resolved to integers and then bitwise OR'd together.
2795
2795
2796 ``payload`` represents the raw frame payload. If it begins with
2796 ``payload`` represents the raw frame payload. If it begins with
2797 ``cbor:``, the following string is evaluated as Python code and the
2797 ``cbor:``, the following string is evaluated as Python code and the
2798 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2798 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2799 as a Python byte string literal.
2799 as a Python byte string literal.
2800 """
2800 """
2801 opts = pycompat.byteskwargs(opts)
2801 opts = pycompat.byteskwargs(opts)
2802
2802
2803 if opts['localssh'] and not repo:
2803 if opts['localssh'] and not repo:
2804 raise error.Abort(_('--localssh requires a repository'))
2804 raise error.Abort(_('--localssh requires a repository'))
2805
2805
2806 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2806 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2807 raise error.Abort(_('invalid value for --peer'),
2807 raise error.Abort(_('invalid value for --peer'),
2808 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2808 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2809
2809
2810 if path and opts['localssh']:
2810 if path and opts['localssh']:
2811 raise error.Abort(_('cannot specify --localssh with an explicit '
2811 raise error.Abort(_('cannot specify --localssh with an explicit '
2812 'path'))
2812 'path'))
2813
2813
2814 if ui.interactive():
2814 if ui.interactive():
2815 ui.write(_('(waiting for commands on stdin)\n'))
2815 ui.write(_('(waiting for commands on stdin)\n'))
2816
2816
2817 blocks = list(_parsewirelangblocks(ui.fin))
2817 blocks = list(_parsewirelangblocks(ui.fin))
2818
2818
2819 proc = None
2819 proc = None
2820 stdin = None
2820 stdin = None
2821 stdout = None
2821 stdout = None
2822 stderr = None
2822 stderr = None
2823 opener = None
2823 opener = None
2824
2824
2825 if opts['localssh']:
2825 if opts['localssh']:
2826 # We start the SSH server in its own process so there is process
2826 # We start the SSH server in its own process so there is process
2827 # separation. This prevents a whole class of potential bugs around
2827 # separation. This prevents a whole class of potential bugs around
2828 # shared state from interfering with server operation.
2828 # shared state from interfering with server operation.
2829 args = procutil.hgcmd() + [
2829 args = procutil.hgcmd() + [
2830 '-R', repo.root,
2830 '-R', repo.root,
2831 'debugserve', '--sshstdio',
2831 'debugserve', '--sshstdio',
2832 ]
2832 ]
2833 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2833 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2834 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2834 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2835 bufsize=0)
2835 bufsize=0)
2836
2836
2837 stdin = proc.stdin
2837 stdin = proc.stdin
2838 stdout = proc.stdout
2838 stdout = proc.stdout
2839 stderr = proc.stderr
2839 stderr = proc.stderr
2840
2840
2841 # We turn the pipes into observers so we can log I/O.
2841 # We turn the pipes into observers so we can log I/O.
2842 if ui.verbose or opts['peer'] == 'raw':
2842 if ui.verbose or opts['peer'] == 'raw':
2843 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2843 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2844 logdata=True)
2844 logdata=True)
2845 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2845 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2846 logdata=True)
2846 logdata=True)
2847 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2847 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2848 logdata=True)
2848 logdata=True)
2849
2849
2850 # --localssh also implies the peer connection settings.
2850 # --localssh also implies the peer connection settings.
2851
2851
2852 url = 'ssh://localserver'
2852 url = 'ssh://localserver'
2853 autoreadstderr = not opts['noreadstderr']
2853 autoreadstderr = not opts['noreadstderr']
2854
2854
2855 if opts['peer'] == 'ssh1':
2855 if opts['peer'] == 'ssh1':
2856 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2856 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2857 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2857 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2858 None, autoreadstderr=autoreadstderr)
2858 None, autoreadstderr=autoreadstderr)
2859 elif opts['peer'] == 'ssh2':
2859 elif opts['peer'] == 'ssh2':
2860 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2860 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2861 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2861 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2862 None, autoreadstderr=autoreadstderr)
2862 None, autoreadstderr=autoreadstderr)
2863 elif opts['peer'] == 'raw':
2863 elif opts['peer'] == 'raw':
2864 ui.write(_('using raw connection to peer\n'))
2864 ui.write(_('using raw connection to peer\n'))
2865 peer = None
2865 peer = None
2866 else:
2866 else:
2867 ui.write(_('creating ssh peer from handshake results\n'))
2867 ui.write(_('creating ssh peer from handshake results\n'))
2868 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2868 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2869 autoreadstderr=autoreadstderr)
2869 autoreadstderr=autoreadstderr)
2870
2870
2871 elif path:
2871 elif path:
2872 # We bypass hg.peer() so we can proxy the sockets.
2872 # We bypass hg.peer() so we can proxy the sockets.
2873 # TODO consider not doing this because we skip
2873 # TODO consider not doing this because we skip
2874 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2874 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2875 u = util.url(path)
2875 u = util.url(path)
2876 if u.scheme != 'http':
2876 if u.scheme != 'http':
2877 raise error.Abort(_('only http:// paths are currently supported'))
2877 raise error.Abort(_('only http:// paths are currently supported'))
2878
2878
2879 url, authinfo = u.authinfo()
2879 url, authinfo = u.authinfo()
2880 openerargs = {}
2880 openerargs = {}
2881
2881
2882 # Turn pipes/sockets into observers so we can log I/O.
2882 # Turn pipes/sockets into observers so we can log I/O.
2883 if ui.verbose:
2883 if ui.verbose:
2884 openerargs = {
2884 openerargs = {
2885 r'loggingfh': ui,
2885 r'loggingfh': ui,
2886 r'loggingname': b's',
2886 r'loggingname': b's',
2887 r'loggingopts': {
2887 r'loggingopts': {
2888 r'logdata': True,
2888 r'logdata': True,
2889 r'logdataapis': False,
2889 r'logdataapis': False,
2890 },
2890 },
2891 }
2891 }
2892
2892
2893 if ui.debugflag:
2893 if ui.debugflag:
2894 openerargs[r'loggingopts'][r'logdataapis'] = True
2894 openerargs[r'loggingopts'][r'logdataapis'] = True
2895
2895
2896 # Don't send default headers when in raw mode. This allows us to
2896 # Don't send default headers when in raw mode. This allows us to
2897 # bypass most of the behavior of our URL handling code so we can
2897 # bypass most of the behavior of our URL handling code so we can
2898 # have near complete control over what's sent on the wire.
2898 # have near complete control over what's sent on the wire.
2899 if opts['peer'] == 'raw':
2899 if opts['peer'] == 'raw':
2900 openerargs[r'sendaccept'] = False
2900 openerargs[r'sendaccept'] = False
2901
2901
2902 opener = urlmod.opener(ui, authinfo, **openerargs)
2902 opener = urlmod.opener(ui, authinfo, **openerargs)
2903
2903
2904 if opts['peer'] == 'raw':
2904 if opts['peer'] == 'raw':
2905 ui.write(_('using raw connection to peer\n'))
2905 ui.write(_('using raw connection to peer\n'))
2906 peer = None
2906 peer = None
2907 elif opts['peer']:
2907 elif opts['peer']:
2908 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2908 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2909 opts['peer'])
2909 opts['peer'])
2910 else:
2910 else:
2911 peer = httppeer.httppeer(ui, path, url, opener)
2911 peer = httppeer.httppeer(ui, path, url, opener)
2912 peer._fetchcaps()
2912 peer._fetchcaps()
2913
2913
2914 # We /could/ populate stdin/stdout with sock.makefile()...
2914 # We /could/ populate stdin/stdout with sock.makefile()...
2915 else:
2915 else:
2916 raise error.Abort(_('unsupported connection configuration'))
2916 raise error.Abort(_('unsupported connection configuration'))
2917
2917
2918 batchedcommands = None
2918 batchedcommands = None
2919
2919
2920 # Now perform actions based on the parsed wire language instructions.
2920 # Now perform actions based on the parsed wire language instructions.
2921 for action, lines in blocks:
2921 for action, lines in blocks:
2922 if action in ('raw', 'raw+'):
2922 if action in ('raw', 'raw+'):
2923 if not stdin:
2923 if not stdin:
2924 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2924 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2925
2925
2926 # Concatenate the data together.
2926 # Concatenate the data together.
2927 data = ''.join(l.lstrip() for l in lines)
2927 data = ''.join(l.lstrip() for l in lines)
2928 data = stringutil.unescapestr(data)
2928 data = stringutil.unescapestr(data)
2929 stdin.write(data)
2929 stdin.write(data)
2930
2930
2931 if action == 'raw+':
2931 if action == 'raw+':
2932 stdin.flush()
2932 stdin.flush()
2933 elif action == 'flush':
2933 elif action == 'flush':
2934 if not stdin:
2934 if not stdin:
2935 raise error.Abort(_('cannot call flush on this peer'))
2935 raise error.Abort(_('cannot call flush on this peer'))
2936 stdin.flush()
2936 stdin.flush()
2937 elif action.startswith('command'):
2937 elif action.startswith('command'):
2938 if not peer:
2938 if not peer:
2939 raise error.Abort(_('cannot send commands unless peer instance '
2939 raise error.Abort(_('cannot send commands unless peer instance '
2940 'is available'))
2940 'is available'))
2941
2941
2942 command = action.split(' ', 1)[1]
2942 command = action.split(' ', 1)[1]
2943
2943
2944 args = {}
2944 args = {}
2945 for line in lines:
2945 for line in lines:
2946 # We need to allow empty values.
2946 # We need to allow empty values.
2947 fields = line.lstrip().split(' ', 1)
2947 fields = line.lstrip().split(' ', 1)
2948 if len(fields) == 1:
2948 if len(fields) == 1:
2949 key = fields[0]
2949 key = fields[0]
2950 value = ''
2950 value = ''
2951 else:
2951 else:
2952 key, value = fields
2952 key, value = fields
2953
2953
2954 args[key] = stringutil.unescapestr(value)
2954 args[key] = stringutil.unescapestr(value)
2955
2955
2956 if batchedcommands is not None:
2956 if batchedcommands is not None:
2957 batchedcommands.append((command, args))
2957 batchedcommands.append((command, args))
2958 continue
2958 continue
2959
2959
2960 ui.status(_('sending %s command\n') % command)
2960 ui.status(_('sending %s command\n') % command)
2961
2961
2962 if 'PUSHFILE' in args:
2962 if 'PUSHFILE' in args:
2963 with open(args['PUSHFILE'], r'rb') as fh:
2963 with open(args['PUSHFILE'], r'rb') as fh:
2964 del args['PUSHFILE']
2964 del args['PUSHFILE']
2965 res, output = peer._callpush(command, fh,
2965 res, output = peer._callpush(command, fh,
2966 **pycompat.strkwargs(args))
2966 **pycompat.strkwargs(args))
2967 ui.status(_('result: %s\n') % stringutil.escapestr(res))
2967 ui.status(_('result: %s\n') % stringutil.escapestr(res))
2968 ui.status(_('remote output: %s\n') %
2968 ui.status(_('remote output: %s\n') %
2969 stringutil.escapestr(output))
2969 stringutil.escapestr(output))
2970 else:
2970 else:
2971 res = peer._call(command, **pycompat.strkwargs(args))
2971 res = peer._call(command, **pycompat.strkwargs(args))
2972 ui.status(_('response: %s\n') % stringutil.pprint(res))
2972 ui.status(_('response: %s\n') % stringutil.pprint(res))
2973
2973
2974 elif action == 'batchbegin':
2974 elif action == 'batchbegin':
2975 if batchedcommands is not None:
2975 if batchedcommands is not None:
2976 raise error.Abort(_('nested batchbegin not allowed'))
2976 raise error.Abort(_('nested batchbegin not allowed'))
2977
2977
2978 batchedcommands = []
2978 batchedcommands = []
2979 elif action == 'batchsubmit':
2979 elif action == 'batchsubmit':
2980 # There is a batching API we could go through. But it would be
2980 # There is a batching API we could go through. But it would be
2981 # difficult to normalize requests into function calls. It is easier
2981 # difficult to normalize requests into function calls. It is easier
2982 # to bypass this layer and normalize to commands + args.
2982 # to bypass this layer and normalize to commands + args.
2983 ui.status(_('sending batch with %d sub-commands\n') %
2983 ui.status(_('sending batch with %d sub-commands\n') %
2984 len(batchedcommands))
2984 len(batchedcommands))
2985 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2985 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2986 ui.status(_('response #%d: %s\n') %
2986 ui.status(_('response #%d: %s\n') %
2987 (i, stringutil.escapestr(chunk)))
2987 (i, stringutil.escapestr(chunk)))
2988
2988
2989 batchedcommands = None
2989 batchedcommands = None
2990
2990
2991 elif action.startswith('httprequest '):
2991 elif action.startswith('httprequest '):
2992 if not opener:
2992 if not opener:
2993 raise error.Abort(_('cannot use httprequest without an HTTP '
2993 raise error.Abort(_('cannot use httprequest without an HTTP '
2994 'peer'))
2994 'peer'))
2995
2995
2996 request = action.split(' ', 2)
2996 request = action.split(' ', 2)
2997 if len(request) != 3:
2997 if len(request) != 3:
2998 raise error.Abort(_('invalid httprequest: expected format is '
2998 raise error.Abort(_('invalid httprequest: expected format is '
2999 '"httprequest <method> <path>'))
2999 '"httprequest <method> <path>'))
3000
3000
3001 method, httppath = request[1:]
3001 method, httppath = request[1:]
3002 headers = {}
3002 headers = {}
3003 body = None
3003 body = None
3004 frames = []
3004 frames = []
3005 for line in lines:
3005 for line in lines:
3006 line = line.lstrip()
3006 line = line.lstrip()
3007 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3007 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3008 if m:
3008 if m:
3009 headers[m.group(1)] = m.group(2)
3009 headers[m.group(1)] = m.group(2)
3010 continue
3010 continue
3011
3011
3012 if line.startswith(b'BODYFILE '):
3012 if line.startswith(b'BODYFILE '):
3013 with open(line.split(b' ', 1)[1], 'rb') as fh:
3013 with open(line.split(b' ', 1)[1], 'rb') as fh:
3014 body = fh.read()
3014 body = fh.read()
3015 elif line.startswith(b'frame '):
3015 elif line.startswith(b'frame '):
3016 frame = wireprotoframing.makeframefromhumanstring(
3016 frame = wireprotoframing.makeframefromhumanstring(
3017 line[len(b'frame '):])
3017 line[len(b'frame '):])
3018
3018
3019 frames.append(frame)
3019 frames.append(frame)
3020 else:
3020 else:
3021 raise error.Abort(_('unknown argument to httprequest: %s') %
3021 raise error.Abort(_('unknown argument to httprequest: %s') %
3022 line)
3022 line)
3023
3023
3024 url = path + httppath
3024 url = path + httppath
3025
3025
3026 if frames:
3026 if frames:
3027 body = b''.join(bytes(f) for f in frames)
3027 body = b''.join(bytes(f) for f in frames)
3028
3028
3029 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3029 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3030
3030
3031 # urllib.Request insists on using has_data() as a proxy for
3031 # urllib.Request insists on using has_data() as a proxy for
3032 # determining the request method. Override that to use our
3032 # determining the request method. Override that to use our
3033 # explicitly requested method.
3033 # explicitly requested method.
3034 req.get_method = lambda: method
3034 req.get_method = lambda: method
3035
3035
3036 try:
3036 try:
3037 opener.open(req).read()
3037 opener.open(req).read()
3038 except util.urlerr.urlerror as e:
3038 except util.urlerr.urlerror as e:
3039 e.read()
3039 e.read()
3040
3040
3041 elif action == 'close':
3041 elif action == 'close':
3042 peer.close()
3042 peer.close()
3043 elif action == 'readavailable':
3043 elif action == 'readavailable':
3044 if not stdout or not stderr:
3044 if not stdout or not stderr:
3045 raise error.Abort(_('readavailable not available on this peer'))
3045 raise error.Abort(_('readavailable not available on this peer'))
3046
3046
3047 stdin.close()
3047 stdin.close()
3048 stdout.read()
3048 stdout.read()
3049 stderr.read()
3049 stderr.read()
3050
3050
3051 elif action == 'readline':
3051 elif action == 'readline':
3052 if not stdout:
3052 if not stdout:
3053 raise error.Abort(_('readline not available on this peer'))
3053 raise error.Abort(_('readline not available on this peer'))
3054 stdout.readline()
3054 stdout.readline()
3055 elif action == 'ereadline':
3055 elif action == 'ereadline':
3056 if not stderr:
3056 if not stderr:
3057 raise error.Abort(_('ereadline not available on this peer'))
3057 raise error.Abort(_('ereadline not available on this peer'))
3058 stderr.readline()
3058 stderr.readline()
3059 elif action.startswith('read '):
3059 elif action.startswith('read '):
3060 count = int(action.split(' ', 1)[1])
3060 count = int(action.split(' ', 1)[1])
3061 if not stdout:
3061 if not stdout:
3062 raise error.Abort(_('read not available on this peer'))
3062 raise error.Abort(_('read not available on this peer'))
3063 stdout.read(count)
3063 stdout.read(count)
3064 elif action.startswith('eread '):
3064 elif action.startswith('eread '):
3065 count = int(action.split(' ', 1)[1])
3065 count = int(action.split(' ', 1)[1])
3066 if not stderr:
3066 if not stderr:
3067 raise error.Abort(_('eread not available on this peer'))
3067 raise error.Abort(_('eread not available on this peer'))
3068 stderr.read(count)
3068 stderr.read(count)
3069 else:
3069 else:
3070 raise error.Abort(_('unknown action: %s') % action)
3070 raise error.Abort(_('unknown action: %s') % action)
3071
3071
3072 if batchedcommands is not None:
3072 if batchedcommands is not None:
3073 raise error.Abort(_('unclosed "batchbegin" request'))
3073 raise error.Abort(_('unclosed "batchbegin" request'))
3074
3074
3075 if peer:
3075 if peer:
3076 peer.close()
3076 peer.close()
3077
3077
3078 if proc:
3078 if proc:
3079 proc.kill()
3079 proc.kill()
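To make the ``frame`` syntax documented above concrete, here is a standalone sketch
of how such a human-readable frame string could be decomposed. All type and flag
names and their numeric values below are illustrative assumptions; the authoritative
tables and the real parser (``makeframefromhumanstring``) live in
mercurial/wireprotoframing.py::

    ILLUSTRATIVE_FRAME_TYPES = {'command-name': 1, 'command-data': 3}
    ILLUSTRATIVE_FRAME_FLAGS = {'new': 0x01, 'continuation': 0x02, 'eos': 0x04}
    ILLUSTRATIVE_STREAM_FLAGS = {'stream-begin': 0x01, 'stream-end': 0x02}

    def resolveflags(spec, table):
        """OR together a '|'-delimited list of flag names or integers."""
        value = 0
        for part in spec.split('|'):
            value |= table[part] if part in table else int(part)
        return value

    def parsehumanframe(spec):
        """Split '<request-id> <stream-id> <stream-flags> <type> <flags> <payload>'."""
        requestid, streamid, streamflags, frametype, flags, payload = spec.split(' ', 5)
        if frametype in ILLUSTRATIVE_FRAME_TYPES:
            frametype = ILLUSTRATIVE_FRAME_TYPES[frametype]
        else:
            frametype = int(frametype)
        if payload.startswith('cbor:'):
            payload = ('cbor', payload[len('cbor:'):])   # would be CBOR-encoded
        else:
            payload = ('bytes', payload)                 # treated as a b'' literal
        return (int(requestid), int(streamid),
                resolveflags(streamflags, ILLUSTRATIVE_STREAM_FLAGS),
                frametype,
                resolveflags(flags, ILLUSTRATIVE_FRAME_FLAGS),
                payload)

    print(parsehumanframe("1 1 stream-begin command-name eos cbor:{b'name': b'heads'}"))
    # (1, 1, 1, 1, 4, ('cbor', "{b'name': b'heads'}"))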