##// END OF EJS Templates
setdiscovery: don't use dagutil for rev -> node conversions...
Gregory Szorc -
r39195:5b32b3c6 default
parent child Browse files
Show More
@@ -1,3325 +1,3327 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 filesetlang,
51 filesetlang,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
92 release = lockmod.release
92 release = lockmod.release
93
93
94 command = registrar.command()
94 command = registrar.command()
95
95
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index

    With three arguments, the first is a revlog index file to open directly;
    with two, the current repository's changelog is used.
    """
    if len(args) == 3:
        # operate on an explicit revlog index file, no repo required
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114
114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # use a context manager so the bundle file is closed even when apply()
    # raises (the original leaked the handle); this matches the pattern
    # already used by debugbundle
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
121
121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # first pass: count the 'n' (node) events so the progress bar has a total
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # seed file contents with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        # second pass: actually create the commits/tags/branch switches
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the shared file
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # stamp this rev's line so every commit changes the file
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        # carry over the other parent's per-rev files too
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # local tag for the preceding node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # switch named branch for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
269
269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """print the contents of a changegroup; full delta details when 'all'."""
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # dump every delta of the named section
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog sections repeat until an empty header is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298
298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # report unknown on-disk format instead of crashing
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
321
321
def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'

    (the previous docstring was copy-pasted from _debugobsmarkers and
    wrongly spoke of "version and markers"; this function decodes and
    prints per-phase head nodes)
    """
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330
330
def _quasirepr(thing):
    """repr-like rendering with dict keys sorted, for stable debug output."""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))
336
336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # honor --part-type filtering when any types were requested
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
359
359
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec, nothing else
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
378
378
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)
397
397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # don't shadow the 'error' module: the original bound the message to
        # a local named 'error', making the following 'error.Abort' an
        # AttributeError on a string instead of the intended abort
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425
425
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; otherwise list raw colors/effects
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
436
436
def _debugdisplaycolor(ui):
    """list every color/effect name, each rendered in its own style."""
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # also surface custom color./terminfo. definitions from the config
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
454
454
def _debugdisplaystyle(ui):
    """list configured styles, with their effects rendered and aligned."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad every row to the longest label so the effect columns line up
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
468
468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486
486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: label only the listed revisions
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = {int(r) for r in revs}

        def events():
            for r in rlog:
                yield 'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # changelog of the current repo, optionally annotated with
        # tags (-t) and branch switches (-b)
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)

        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549
549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the single positional argument is the revision,
    # not a file name.
    if any(opts.get(k) for k in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565
565
@command('debugdate',
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]'),
         norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended enables the larger set of accepted date formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # report whether the parsed timestamp falls within RANGE
        matchfn = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
581
581
@command('debugdeltachain',
         cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    # bind frequently used revlog methods to locals for the per-rev loop
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Return (compsize, uncompsize, deltatype, chain, chainsize) for rev,
        # reading the raw index tuple: e[1]=compressed len, e[2]=uncompressed
        # len, e[3]=delta base rev, e[5]/e[6]=parent revs.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base can be any earlier rev; classify it
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta a delta is always against the previous rev
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # NOTE(review): column spacing in these header literals was collapsed by
    # extraction; widths below follow the %-format field widths used later.
    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        # number chains consecutively by first-seen base revision
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes spanned on disk from the chain base to the end of this rev
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize
733
733
@command('debugdirstate|debugstate',
         [('', 'nodates', None, _('do not display the saved mtime')),
          ('', 'datesort', None, _('sort by saved mtime'))],
         _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # dirstate entries are (state, mode, size, mtime) tuples keyed by path
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # NOTE(review): trailing padding on these literals was collapsed
            # by extraction; widths match the strftime output below (20 cols)
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765
765
@command('debugdiscovery',
         [('', 'old', None, _('use old-style discovery')),
          ('', 'nonheads', None,
           _('use old-style discovery with non-heads included')),
          ('', 'rev', [], 'restrict discovery to this set of revs'),
         ] + cmdutil.remoteopts,
         _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # convert the common set to heads of its induced subgraph,
            # mapping revs back to nodes via the changelog (not dagutil)
            cl = repo.changelog
            clnode = cl.node
            dag = dagutil.revlogdag(cl)
            all = dag.ancestorset(dag.internalizeall(common))
            common = {clnode(r) for r in dag.headsetofconnecteds(all)}
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # renamed from 'any': don't shadow the builtin; the flag
            # (remote has incoming changes) is unused here anyway
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
817
819
# Copy-buffer size (4 KiB) used by debugdownload below.
_chunksize = 4 << 10
819
821
@command('debugdownload',
         [
             ('o', 'output', '', _('path')),
         ],
         optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    # Write either to the ui (default) or to the requested output file.
    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        # Copy in fixed-size chunks so large downloads stay bounded in memory.
        while True:
            data = fh.read(_chunksize)
            if not data:
                break
            dest.write(data)
    finally:
        if output:
            dest.close()
841
843
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # one formatter item per loaded extension, sorted by name
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with compatibility info for this hg version
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        # always record the flag for machine-readable output
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
887
889
@command('debugfileset',
         [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
          ('', 'all-files', False,
           _('test files from all revisions and working directory')),
          ('s', 'show-matcher', None,
           _('print internal representation of matcher')),
          ('p', 'show-stage', [],
           _('print parsed tree at the given stage'), _('NAME'))],
         _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # pipeline of (stage name, transform) applied to the parsed tree in order
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    # stages whose tree should be printed
    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate files the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    # print every candidate file accepted by the fileset matcher
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
954
956
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # widest variant name determines column alignment
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # pad 'name:' out to the common column width
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through; booleans render as yes/no in plain mode
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels describing how repo state relates to config/default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        # config and default columns only appear with --verbose
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1016
1018
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # render a capability probe result as 'yes'/'no'
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # Case sensitivity is probed by creating a scratch file inside the
    # target path; if that fails (e.g. unwritable path) the answer stays
    # unknown rather than aborting.
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1033
1035
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # Assemble getbundle() keyword arguments from the command line; node
    # ids arrive as full-length hex and are converted to binary here.
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto the on-disk bundle header.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1068
1070
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            # normalize to repo-relative forward-slash form before matching
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    # the file itself matches an ignore pattern
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not ignored directly; walk parent directories to see
                    # whether a containing folder is ignored instead
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                # report which hgignore file and line produced the match
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1110
1112
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug prints full-length hashes; otherwise the abbreviated form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # width of the first node id determines the column width
        idlen = len(shortfn(r.node(i)))
        break

    # header: format 0 is the historical layout; format 1 adds flags and
    # raw sizes. Verbose mode adds offset/length detail in both formats.
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the entry is damaged so the
                # rest of the index can still be dumped
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                        i, r.start(i), r.length(i), r.linkrev(i),
                        shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                        i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                        r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1175
1177
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # One edge per real parent; the null second parent is skipped so that
    # linear history renders as a simple chain.
    for rev in rlog:
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1190
1192
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write ``contents`` to a fresh temporary file and return its path
        # NOTE(review): this helper appears unused within this function in
        # the current revision — candidate for removal; confirm upstream.
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # running tally of hard failures; warnings do not increment it
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        # verify the configured encoding name is known to Python
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS/SNI capabilities of the Python ssl module
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are expected under these policies; try loading them
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, loadable, and wire-protocol capable
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # p is cleared so the summary below reports a broken install
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    # the editor setting may include arguments; check only the binary
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        # a missing 'vi' default is only a warning; a missing configured
        # editor is a real problem
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    # non-zero exit status when any hard failure was found
    return problems
1364
1366
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Query all ids in one round trip; the peer answers one boolean per id,
    # which we render as a string of 0/1 digits in input order.
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in flags))
1378
1380
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all completion logic lives in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1383
1385
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # --force-lock/--force-wlock: unconditionally delete the lock files
    # (dangerous; no ownership check), then exit without reporting state
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # --set-lock/--set-wlock: acquire without waiting, hold until the user
    # (or a signal) answers the prompt, then release in the finally block
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # default mode: report current lock state
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Returns 1 if the named lock is held by someone else, 0 if free.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so it was free; give it back
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    # lock contents are "host:pid"; only mention the host
                    # when the holder is on a different machine
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

            # lock file vanished between the acquire attempt and lstat
            ui.write(("%-6s free\n") % (name + ":"))
            return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1480
1482
@command('debugmanifestfulltextcache', [
    ('', 'clear', False, _('clear the cache')),
    ('a', 'add', '', _('add the given manifest node to the cache'),
     _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    # Hold the repo lock for the whole operation: we may mutate the cache
    # (--clear / --add) and we read its on-disk file size at the end.
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            # Alternate revlog implementations (or future refactors) may not
            # expose a fulltext cache; bail out gracefully instead of crashing.
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                # lookup() accepts a node prefix / hex id and resolves it to
                # a full binary node, raising LookupError when unknown.
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read() # stores revisision in cache too

        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                # Account for per-entry storage overhead in the on-disk
                # format, not just the payload.
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            # Compare the computed logical size with the actual file size
            # so discrepancies (e.g. stale data) are visible.
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1527
1529
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null' for readability.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump every record of the requested merge-state format version
        # (1 or 2), pretty-printing the record types we understand.
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge driver record: driver name and its state, NUL-separated
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record: NUL-separated fields; v1 lacks the
                # "other node" field, so it is filled with a placeholder.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename then alternating key/value pairs
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local/other and an optional base label
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                # Unknown record type: dump it raw with NULs made visible.
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types ('L', 'O', 'm', 'l') sort first, in that fixed
        # order; everything else sorts after, by record payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        # Mismatch: trust v1, but show v2 as well under --verbose so the
        # discrepancy can be inspected.
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1626
1628
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    # Add only branches that are not closed, preserving the historical
    # "open branches only" completion behavior.
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        # No prefixes given: the empty prefix matches every name.
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1646
1648
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full 40-hex-digit node id to binary; bin() raises
        # TypeError on bad hex, and we reject short prefixes explicitly.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker precursor -> successors.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        # Lock before opening the transaction; release in reverse order
        # (transaction first, then lock) via the nested finally blocks.
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # Parents can only be recorded for changesets we have
                    # locally (check against the unfiltered repo so hidden
                    # changesets still qualify).
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                # obsstore.create validates its input and raises ValueError
                # on malformed markers; surface that as a user-facing abort.
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # With both --rev and --index we must iterate over ALL markers
            # so the displayed indices stay globally correct, and filter
            # down to the --rev-relevant subset at display time.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1763
1765
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for `path`, restricted to
        # dirstate entries whose state character is in `acceptable`.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Nothing to complete for paths outside the repository root.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # The dirstate stores paths with '/' separators; on platforms with a
        # different separator (Windows), translate in both directions.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs over the whole dirstate.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, truncate the match at the next path
                # separator: a hit there is a directory completion.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate states from the flags;
    # when no flag is given, fall back to all states ('nmar') below.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1828
1830
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        # A non-None local() means the peer is a local repository object
        # rather than a remote (wire-protocol) peer.
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1847
1849
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool maps to the ui.forcemerge config override, mirroring how
        # the real merge machinery consumes it.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # Report (under --verbose) the other tool-selection inputs that
        # take precedence over merge-patterns.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # _picktool may emit diagnostics; without --debug, capture
                # them in a buffer so only the FILE = TOOL lines are shown.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1926
1928
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        # Set mode: keyinfo carries (key, old, new); issue a pushkey
        # command through the peer's command executor.
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        # pushkey returns truthy on success; invert for the shell exit code
        # convention (0 == success).
        return not r
    else:
        # List mode: dump every key/value in the namespace, escaped so
        # binary-ish values stay printable.
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
1954
1956
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors ("pvecs") of two revisions and print
    # their depths, delta, hamming distance and relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): the comparison operators below are assumed to be
    # exhaustive for pvecs; if none matched, ``rel`` would be unbound.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1975
1977
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "rebuild everything"; --minimal narrows it below.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # tracked in the manifest but missing from the dirstate
            manifestonly = manifestfiles - dirstatefiles
            # in the dirstate but not in the manifest, excluding pending adds
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2013
2015
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work happens in the repair module.
    repair.rebuildfncache(ui, repo)
2018
2020
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (oldpath, oldfilenode) or False/None when the
        # file was not created by a rename at this revision.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
2036
2038
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump: one line of raw index data per revision, then exit.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # track current DAG heads: parents stop being heads, rev becomes one
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each list is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # fold ``size`` into the [min, max, total] accumulator ``l``
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # full snapshot (no delta parent)
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                # intermediate snapshot: delta against another snapshot
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # plain delta: classify by what it deltas against
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    empty     : ') + fmt % pcfmt(numempty, numrevs))
    ui.write(('                   text  : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write(('                   delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write(('    snapshot  : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    snapshot  : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write(('      lvl-%-3d :       ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg)    : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write(('    level-%-3d (min/max/avg)          : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                                numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                                numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                                numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2336
2338
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Pipeline of (name, transform) pairs applied to the parsed tree in order.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and the optimized trees and diff the
        # resulting revision sequences; any difference means the optimizer
        # changed semantics.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2439
2441
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio transport is implemented here.
    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are two mutually exclusive ways of naming
    # the I/O log destination.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    # Serve the current repo over stdio, mirroring all I/O to logfh (if any).
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2476
2478
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions to binary nodes up front; an omitted REV2
    # means the null revision (single-parent working directory).
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    # Only the working copy lock is needed; the dirstate itself is left
    # untouched apart from the parent pointers.
    with repo.wlock():
        repo.setparents(p1, p2)
2494
2496
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Resolve a (host, port) pair, falling back to the scheme's well-known
    # port when the URL does not name one explicitly.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # Verification is deliberately disabled (CERT_NONE): the goal is only to
    # fetch the peer certificate so win32 can inspect and repair its chain.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass: check the chain without building, to report status.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second pass (build=True by default) may install missing
            # intermediates/roots via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2556
2558
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the recorded substate entry (path, source, revision) for every
    # subrepository of the requested changeset, in sorted path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source   %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2567
2569
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless the closest
    successors sets option is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # One indented line per successors set, nodes separated by
                # single spaces.
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2620
2622
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # --rev requires a repository even though the command itself does not.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Parse -D KEY=VALUE definitions into extra template properties.
    # 'ui' is reserved, and an empty key is rejected.
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, then the tree after alias expansion if
        # expansion actually changed anything.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with the user-supplied properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2677
2679
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Echo what was read; label spelled 'response' to match debuguiprompt
    # (the previous 'respose' was a typo).
    ui.write(('response: %s\n') % r)
2685
2687
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
    ], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer so tests can observe how the prompt was handled.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2693
2695
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold the working copy lock and then the store lock so the cache
    # recomputation sees a stable repository.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2699
2701
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin CLI wrapper; all of the analysis/upgrade logic lives in the
    # upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2724
2726
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    wctx = repo[None]
    m = scmutil.match(wctx, pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(wctx.walk(m))
    if not items:
        return
    # Normalize path separators for display when ui.slash is requested on a
    # platform whose native separator is not '/'.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size the columns to the widest absolute and relative paths.
    abswidth = max(len(abs) for abs in items)
    relwidth = max(len(m.rel(abs)) for abs in items)
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (abswidth, relwidth)
    for abs in items:
        flag = 'exact' if m.exact(abs) else ''
        line = fmt % (abs, display(m.rel(abs)), flag)
        ui.write("%s\n" % line.rstrip())
2745
2747
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        # Render any divergent changesets as "hex (phase)" pairs, with a
        # trailing space so the list slots into the output line below.
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2756
2758
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument passing over the wire protocol against a peer.
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # The remote options were consumed while constructing the peer; remove
    # them so they are not forwarded as wire command arguments.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Only forward options that were actually set.
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    first = peer.debugwireargs(*vals, **args)
    second = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % first)
    if first != second:
        ui.warn("%s\n" % second)
2780
2782
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language into (action, lines) pairs.

    Yields one ``(action, blocklines)`` tuple per block, where ``action``
    is the unindented header line and ``blocklines`` are the indented
    payload lines (trailing whitespace stripped, indentation preserved).
    Blank lines and lines starting with ``#`` are ignored.
    """
    action = None
    payload = []

    for raw in fh:
        stripped = raw.rstrip()

        # Ignore blank lines and comments entirely.
        if not stripped or stripped.startswith(b'#'):
            continue

        if stripped.startswith(b' '):
            # Indented line: part of the current block's payload.
            if not action:
                raise error.Abort(_('indented line outside of block'))
            payload.append(stripped)
        else:
            # Unindented line starts a new block; emit the previous one.
            if action:
                yield action, payload
            action = stripped
            payload = []

    # Emit the trailing block, if any.
    if action:
        yield action, payload
2812
2814
2813 @command('debugwireproto',
2815 @command('debugwireproto',
2814 [
2816 [
2815 ('', 'localssh', False, _('start an SSH server for this repo')),
2817 ('', 'localssh', False, _('start an SSH server for this repo')),
2816 ('', 'peer', '', _('construct a specific version of the peer')),
2818 ('', 'peer', '', _('construct a specific version of the peer')),
2817 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2819 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2818 ('', 'nologhandshake', False,
2820 ('', 'nologhandshake', False,
2819 _('do not log I/O related to the peer handshake')),
2821 _('do not log I/O related to the peer handshake')),
2820 ] + cmdutil.remoteopts,
2822 ] + cmdutil.remoteopts,
2821 _('[PATH]'),
2823 _('[PATH]'),
2822 optionalrepo=True)
2824 optionalrepo=True)
2823 def debugwireproto(ui, repo, path=None, **opts):
2825 def debugwireproto(ui, repo, path=None, **opts):
2824 """send wire protocol commands to a server
2826 """send wire protocol commands to a server
2825
2827
2826 This command can be used to issue wire protocol commands to remote
2828 This command can be used to issue wire protocol commands to remote
2827 peers and to debug the raw data being exchanged.
2829 peers and to debug the raw data being exchanged.
2828
2830
2829 ``--localssh`` will start an SSH server against the current repository
2831 ``--localssh`` will start an SSH server against the current repository
2830 and connect to that. By default, the connection will perform a handshake
2832 and connect to that. By default, the connection will perform a handshake
2831 and establish an appropriate peer instance.
2833 and establish an appropriate peer instance.
2832
2834
2833 ``--peer`` can be used to bypass the handshake protocol and construct a
2835 ``--peer`` can be used to bypass the handshake protocol and construct a
2834 peer instance using the specified class type. Valid values are ``raw``,
2836 peer instance using the specified class type. Valid values are ``raw``,
2835 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2837 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2836 raw data payloads and don't support higher-level command actions.
2838 raw data payloads and don't support higher-level command actions.
2837
2839
2838 ``--noreadstderr`` can be used to disable automatic reading from stderr
2840 ``--noreadstderr`` can be used to disable automatic reading from stderr
2839 of the peer (for SSH connections only). Disabling automatic reading of
2841 of the peer (for SSH connections only). Disabling automatic reading of
2840 stderr is useful for making output more deterministic.
2842 stderr is useful for making output more deterministic.
2841
2843
2842 Commands are issued via a mini language which is specified via stdin.
2844 Commands are issued via a mini language which is specified via stdin.
2843 The language consists of individual actions to perform. An action is
2845 The language consists of individual actions to perform. An action is
2844 defined by a block. A block is defined as a line with no leading
2846 defined by a block. A block is defined as a line with no leading
2845 space followed by 0 or more lines with leading space. Blocks are
2847 space followed by 0 or more lines with leading space. Blocks are
2846 effectively a high-level command with additional metadata.
2848 effectively a high-level command with additional metadata.
2847
2849
2848 Lines beginning with ``#`` are ignored.
2850 Lines beginning with ``#`` are ignored.
2849
2851
2850 The following sections denote available actions.
2852 The following sections denote available actions.
2851
2853
2852 raw
2854 raw
2853 ---
2855 ---
2854
2856
2855 Send raw data to the server.
2857 Send raw data to the server.
2856
2858
2857 The block payload contains the raw data to send as one atomic send
2859 The block payload contains the raw data to send as one atomic send
2858 operation. The data may not actually be delivered in a single system
2860 operation. The data may not actually be delivered in a single system
2859 call: it depends on the abilities of the transport being used.
2861 call: it depends on the abilities of the transport being used.
2860
2862
2861 Each line in the block is de-indented and concatenated. Then, that
2863 Each line in the block is de-indented and concatenated. Then, that
2862 value is evaluated as a Python b'' literal. This allows the use of
2864 value is evaluated as a Python b'' literal. This allows the use of
2863 backslash escaping, etc.
2865 backslash escaping, etc.
2864
2866
2865 raw+
2867 raw+
2866 ----
2868 ----
2867
2869
2868 Behaves like ``raw`` except flushes output afterwards.
2870 Behaves like ``raw`` except flushes output afterwards.
2869
2871
2870 command <X>
2872 command <X>
2871 -----------
2873 -----------
2872
2874
2873 Send a request to run a named command, whose name follows the ``command``
2875 Send a request to run a named command, whose name follows the ``command``
2874 string.
2876 string.
2875
2877
2876 Arguments to the command are defined as lines in this block. The format of
2878 Arguments to the command are defined as lines in this block. The format of
2877 each line is ``<key> <value>``. e.g.::
2879 each line is ``<key> <value>``. e.g.::
2878
2880
2879 command listkeys
2881 command listkeys
2880 namespace bookmarks
2882 namespace bookmarks
2881
2883
2882 If the value begins with ``eval:``, it will be interpreted as a Python
2884 If the value begins with ``eval:``, it will be interpreted as a Python
2883 literal expression. Otherwise values are interpreted as Python b'' literals.
2885 literal expression. Otherwise values are interpreted as Python b'' literals.
2884 This allows sending complex types and encoding special byte sequences via
2886 This allows sending complex types and encoding special byte sequences via
2885 backslash escaping.
2887 backslash escaping.
2886
2888
2887 The following arguments have special meaning:
2889 The following arguments have special meaning:
2888
2890
2889 ``PUSHFILE``
2891 ``PUSHFILE``
2890 When defined, the *push* mechanism of the peer will be used instead
2892 When defined, the *push* mechanism of the peer will be used instead
2891 of the static request-response mechanism and the content of the
2893 of the static request-response mechanism and the content of the
2892 file specified in the value of this argument will be sent as the
2894 file specified in the value of this argument will be sent as the
2893 command payload.
2895 command payload.
2894
2896
2895 This can be used to submit a local bundle file to the remote.
2897 This can be used to submit a local bundle file to the remote.
2896
2898
2897 batchbegin
2899 batchbegin
2898 ----------
2900 ----------
2899
2901
2900 Instruct the peer to begin a batched send.
2902 Instruct the peer to begin a batched send.
2901
2903
2902 All ``command`` blocks are queued for execution until the next
2904 All ``command`` blocks are queued for execution until the next
2903 ``batchsubmit`` block.
2905 ``batchsubmit`` block.
2904
2906
2905 batchsubmit
2907 batchsubmit
2906 -----------
2908 -----------
2907
2909
2908 Submit previously queued ``command`` blocks as a batch request.
2910 Submit previously queued ``command`` blocks as a batch request.
2909
2911
2910 This action MUST be paired with a ``batchbegin`` action.
2912 This action MUST be paired with a ``batchbegin`` action.
2911
2913
2912 httprequest <method> <path>
2914 httprequest <method> <path>
2913 ---------------------------
2915 ---------------------------
2914
2916
2915 (HTTP peer only)
2917 (HTTP peer only)
2916
2918
2917 Send an HTTP request to the peer.
2919 Send an HTTP request to the peer.
2918
2920
2919 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2921 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2920
2922
2921 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2923 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2922 headers to add to the request. e.g. ``Accept: foo``.
2924 headers to add to the request. e.g. ``Accept: foo``.
2923
2925
2924 The following arguments are special:
2926 The following arguments are special:
2925
2927
2926 ``BODYFILE``
2928 ``BODYFILE``
2927 The content of the file defined as the value to this argument will be
2929 The content of the file defined as the value to this argument will be
2928 transferred verbatim as the HTTP request body.
2930 transferred verbatim as the HTTP request body.
2929
2931
2930 ``frame <type> <flags> <payload>``
2932 ``frame <type> <flags> <payload>``
2931 Send a unified protocol frame as part of the request body.
2933 Send a unified protocol frame as part of the request body.
2932
2934
2933 All frames will be collected and sent as the body to the HTTP
2935 All frames will be collected and sent as the body to the HTTP
2934 request.
2936 request.
2935
2937
2936 close
2938 close
2937 -----
2939 -----
2938
2940
2939 Close the connection to the server.
2941 Close the connection to the server.
2940
2942
2941 flush
2943 flush
2942 -----
2944 -----
2943
2945
2944 Flush data written to the server.
2946 Flush data written to the server.
2945
2947
2946 readavailable
2948 readavailable
2947 -------------
2949 -------------
2948
2950
2949 Close the write end of the connection and read all available data from
2951 Close the write end of the connection and read all available data from
2950 the server.
2952 the server.
2951
2953
2952 If the connection to the server encompasses multiple pipes, we poll both
2954 If the connection to the server encompasses multiple pipes, we poll both
2953 pipes and read available data.
2955 pipes and read available data.
2954
2956
2955 readline
2957 readline
2956 --------
2958 --------
2957
2959
2958 Read a line of output from the server. If there are multiple output
2960 Read a line of output from the server. If there are multiple output
2959 pipes, reads only the main pipe.
2961 pipes, reads only the main pipe.
2960
2962
2961 ereadline
2963 ereadline
2962 ---------
2964 ---------
2963
2965
2964 Like ``readline``, but read from the stderr pipe, if available.
2966 Like ``readline``, but read from the stderr pipe, if available.
2965
2967
2966 read <X>
2968 read <X>
2967 --------
2969 --------
2968
2970
2969 ``read()`` N bytes from the server's main output pipe.
2971 ``read()`` N bytes from the server's main output pipe.
2970
2972
2971 eread <X>
2973 eread <X>
2972 ---------
2974 ---------
2973
2975
2974 ``read()`` N bytes from the server's stderr pipe, if available.
2976 ``read()`` N bytes from the server's stderr pipe, if available.
2975
2977
2976 Specifying Unified Frame-Based Protocol Frames
2978 Specifying Unified Frame-Based Protocol Frames
2977 ----------------------------------------------
2979 ----------------------------------------------
2978
2980
2979 It is possible to emit a *Unified Frame-Based Protocol* by using special
2981 It is possible to emit a *Unified Frame-Based Protocol* by using special
2980 syntax.
2982 syntax.
2981
2983
2982 A frame is composed as a type, flags, and payload. These can be parsed
2984 A frame is composed as a type, flags, and payload. These can be parsed
2983 from a string of the form:
2985 from a string of the form:
2984
2986
2985 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2987 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2986
2988
2987 ``request-id`` and ``stream-id`` are integers defining the request and
2989 ``request-id`` and ``stream-id`` are integers defining the request and
2988 stream identifiers.
2990 stream identifiers.
2989
2991
2990 ``type`` can be an integer value for the frame type or the string name
2992 ``type`` can be an integer value for the frame type or the string name
2991 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2993 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2992 ``command-name``.
2994 ``command-name``.
2993
2995
2994 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2996 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2995 components. Each component (and there can be just one) can be an integer
2997 components. Each component (and there can be just one) can be an integer
2996 or a flag name for stream flags or frame flags, respectively. Values are
2998 or a flag name for stream flags or frame flags, respectively. Values are
2997 resolved to integers and then bitwise OR'd together.
2999 resolved to integers and then bitwise OR'd together.
2998
3000
2999 ``payload`` represents the raw frame payload. If it begins with
3001 ``payload`` represents the raw frame payload. If it begins with
3000 ``cbor:``, the following string is evaluated as Python code and the
3002 ``cbor:``, the following string is evaluated as Python code and the
3001 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3003 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3002 as a Python byte string literal.
3004 as a Python byte string literal.
3003 """
3005 """
3004 opts = pycompat.byteskwargs(opts)
3006 opts = pycompat.byteskwargs(opts)
3005
3007
3006 if opts['localssh'] and not repo:
3008 if opts['localssh'] and not repo:
3007 raise error.Abort(_('--localssh requires a repository'))
3009 raise error.Abort(_('--localssh requires a repository'))
3008
3010
3009 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3011 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3010 raise error.Abort(_('invalid value for --peer'),
3012 raise error.Abort(_('invalid value for --peer'),
3011 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3013 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3012
3014
3013 if path and opts['localssh']:
3015 if path and opts['localssh']:
3014 raise error.Abort(_('cannot specify --localssh with an explicit '
3016 raise error.Abort(_('cannot specify --localssh with an explicit '
3015 'path'))
3017 'path'))
3016
3018
3017 if ui.interactive():
3019 if ui.interactive():
3018 ui.write(_('(waiting for commands on stdin)\n'))
3020 ui.write(_('(waiting for commands on stdin)\n'))
3019
3021
3020 blocks = list(_parsewirelangblocks(ui.fin))
3022 blocks = list(_parsewirelangblocks(ui.fin))
3021
3023
3022 proc = None
3024 proc = None
3023 stdin = None
3025 stdin = None
3024 stdout = None
3026 stdout = None
3025 stderr = None
3027 stderr = None
3026 opener = None
3028 opener = None
3027
3029
3028 if opts['localssh']:
3030 if opts['localssh']:
3029 # We start the SSH server in its own process so there is process
3031 # We start the SSH server in its own process so there is process
3030 # separation. This prevents a whole class of potential bugs around
3032 # separation. This prevents a whole class of potential bugs around
3031 # shared state from interfering with server operation.
3033 # shared state from interfering with server operation.
3032 args = procutil.hgcmd() + [
3034 args = procutil.hgcmd() + [
3033 '-R', repo.root,
3035 '-R', repo.root,
3034 'debugserve', '--sshstdio',
3036 'debugserve', '--sshstdio',
3035 ]
3037 ]
3036 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3038 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3037 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3039 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3038 bufsize=0)
3040 bufsize=0)
3039
3041
3040 stdin = proc.stdin
3042 stdin = proc.stdin
3041 stdout = proc.stdout
3043 stdout = proc.stdout
3042 stderr = proc.stderr
3044 stderr = proc.stderr
3043
3045
3044 # We turn the pipes into observers so we can log I/O.
3046 # We turn the pipes into observers so we can log I/O.
3045 if ui.verbose or opts['peer'] == 'raw':
3047 if ui.verbose or opts['peer'] == 'raw':
3046 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3048 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3047 logdata=True)
3049 logdata=True)
3048 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3050 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3049 logdata=True)
3051 logdata=True)
3050 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3052 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3051 logdata=True)
3053 logdata=True)
3052
3054
3053 # --localssh also implies the peer connection settings.
3055 # --localssh also implies the peer connection settings.
3054
3056
3055 url = 'ssh://localserver'
3057 url = 'ssh://localserver'
3056 autoreadstderr = not opts['noreadstderr']
3058 autoreadstderr = not opts['noreadstderr']
3057
3059
3058 if opts['peer'] == 'ssh1':
3060 if opts['peer'] == 'ssh1':
3059 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3061 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3060 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3062 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3061 None, autoreadstderr=autoreadstderr)
3063 None, autoreadstderr=autoreadstderr)
3062 elif opts['peer'] == 'ssh2':
3064 elif opts['peer'] == 'ssh2':
3063 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3065 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3064 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3066 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3065 None, autoreadstderr=autoreadstderr)
3067 None, autoreadstderr=autoreadstderr)
3066 elif opts['peer'] == 'raw':
3068 elif opts['peer'] == 'raw':
3067 ui.write(_('using raw connection to peer\n'))
3069 ui.write(_('using raw connection to peer\n'))
3068 peer = None
3070 peer = None
3069 else:
3071 else:
3070 ui.write(_('creating ssh peer from handshake results\n'))
3072 ui.write(_('creating ssh peer from handshake results\n'))
3071 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3073 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3072 autoreadstderr=autoreadstderr)
3074 autoreadstderr=autoreadstderr)
3073
3075
3074 elif path:
3076 elif path:
3075 # We bypass hg.peer() so we can proxy the sockets.
3077 # We bypass hg.peer() so we can proxy the sockets.
3076 # TODO consider not doing this because we skip
3078 # TODO consider not doing this because we skip
3077 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3079 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3078 u = util.url(path)
3080 u = util.url(path)
3079 if u.scheme != 'http':
3081 if u.scheme != 'http':
3080 raise error.Abort(_('only http:// paths are currently supported'))
3082 raise error.Abort(_('only http:// paths are currently supported'))
3081
3083
3082 url, authinfo = u.authinfo()
3084 url, authinfo = u.authinfo()
3083 openerargs = {
3085 openerargs = {
3084 r'useragent': b'Mercurial debugwireproto',
3086 r'useragent': b'Mercurial debugwireproto',
3085 }
3087 }
3086
3088
3087 # Turn pipes/sockets into observers so we can log I/O.
3089 # Turn pipes/sockets into observers so we can log I/O.
3088 if ui.verbose:
3090 if ui.verbose:
3089 openerargs.update({
3091 openerargs.update({
3090 r'loggingfh': ui,
3092 r'loggingfh': ui,
3091 r'loggingname': b's',
3093 r'loggingname': b's',
3092 r'loggingopts': {
3094 r'loggingopts': {
3093 r'logdata': True,
3095 r'logdata': True,
3094 r'logdataapis': False,
3096 r'logdataapis': False,
3095 },
3097 },
3096 })
3098 })
3097
3099
3098 if ui.debugflag:
3100 if ui.debugflag:
3099 openerargs[r'loggingopts'][r'logdataapis'] = True
3101 openerargs[r'loggingopts'][r'logdataapis'] = True
3100
3102
3101 # Don't send default headers when in raw mode. This allows us to
3103 # Don't send default headers when in raw mode. This allows us to
3102 # bypass most of the behavior of our URL handling code so we can
3104 # bypass most of the behavior of our URL handling code so we can
3103 # have near complete control over what's sent on the wire.
3105 # have near complete control over what's sent on the wire.
3104 if opts['peer'] == 'raw':
3106 if opts['peer'] == 'raw':
3105 openerargs[r'sendaccept'] = False
3107 openerargs[r'sendaccept'] = False
3106
3108
3107 opener = urlmod.opener(ui, authinfo, **openerargs)
3109 opener = urlmod.opener(ui, authinfo, **openerargs)
3108
3110
3109 if opts['peer'] == 'http2':
3111 if opts['peer'] == 'http2':
3110 ui.write(_('creating http peer for wire protocol version 2\n'))
3112 ui.write(_('creating http peer for wire protocol version 2\n'))
3111 # We go through makepeer() because we need an API descriptor for
3113 # We go through makepeer() because we need an API descriptor for
3112 # the peer instance to be useful.
3114 # the peer instance to be useful.
3113 with ui.configoverride({
3115 with ui.configoverride({
3114 ('experimental', 'httppeer.advertise-v2'): True}):
3116 ('experimental', 'httppeer.advertise-v2'): True}):
3115 if opts['nologhandshake']:
3117 if opts['nologhandshake']:
3116 ui.pushbuffer()
3118 ui.pushbuffer()
3117
3119
3118 peer = httppeer.makepeer(ui, path, opener=opener)
3120 peer = httppeer.makepeer(ui, path, opener=opener)
3119
3121
3120 if opts['nologhandshake']:
3122 if opts['nologhandshake']:
3121 ui.popbuffer()
3123 ui.popbuffer()
3122
3124
3123 if not isinstance(peer, httppeer.httpv2peer):
3125 if not isinstance(peer, httppeer.httpv2peer):
3124 raise error.Abort(_('could not instantiate HTTP peer for '
3126 raise error.Abort(_('could not instantiate HTTP peer for '
3125 'wire protocol version 2'),
3127 'wire protocol version 2'),
3126 hint=_('the server may not have the feature '
3128 hint=_('the server may not have the feature '
3127 'enabled or is not allowing this '
3129 'enabled or is not allowing this '
3128 'client version'))
3130 'client version'))
3129
3131
3130 elif opts['peer'] == 'raw':
3132 elif opts['peer'] == 'raw':
3131 ui.write(_('using raw connection to peer\n'))
3133 ui.write(_('using raw connection to peer\n'))
3132 peer = None
3134 peer = None
3133 elif opts['peer']:
3135 elif opts['peer']:
3134 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3136 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3135 opts['peer'])
3137 opts['peer'])
3136 else:
3138 else:
3137 peer = httppeer.makepeer(ui, path, opener=opener)
3139 peer = httppeer.makepeer(ui, path, opener=opener)
3138
3140
3139 # We /could/ populate stdin/stdout with sock.makefile()...
3141 # We /could/ populate stdin/stdout with sock.makefile()...
3140 else:
3142 else:
3141 raise error.Abort(_('unsupported connection configuration'))
3143 raise error.Abort(_('unsupported connection configuration'))
3142
3144
3143 batchedcommands = None
3145 batchedcommands = None
3144
3146
3145 # Now perform actions based on the parsed wire language instructions.
3147 # Now perform actions based on the parsed wire language instructions.
3146 for action, lines in blocks:
3148 for action, lines in blocks:
3147 if action in ('raw', 'raw+'):
3149 if action in ('raw', 'raw+'):
3148 if not stdin:
3150 if not stdin:
3149 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3151 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3150
3152
3151 # Concatenate the data together.
3153 # Concatenate the data together.
3152 data = ''.join(l.lstrip() for l in lines)
3154 data = ''.join(l.lstrip() for l in lines)
3153 data = stringutil.unescapestr(data)
3155 data = stringutil.unescapestr(data)
3154 stdin.write(data)
3156 stdin.write(data)
3155
3157
3156 if action == 'raw+':
3158 if action == 'raw+':
3157 stdin.flush()
3159 stdin.flush()
3158 elif action == 'flush':
3160 elif action == 'flush':
3159 if not stdin:
3161 if not stdin:
3160 raise error.Abort(_('cannot call flush on this peer'))
3162 raise error.Abort(_('cannot call flush on this peer'))
3161 stdin.flush()
3163 stdin.flush()
3162 elif action.startswith('command'):
3164 elif action.startswith('command'):
3163 if not peer:
3165 if not peer:
3164 raise error.Abort(_('cannot send commands unless peer instance '
3166 raise error.Abort(_('cannot send commands unless peer instance '
3165 'is available'))
3167 'is available'))
3166
3168
3167 command = action.split(' ', 1)[1]
3169 command = action.split(' ', 1)[1]
3168
3170
3169 args = {}
3171 args = {}
3170 for line in lines:
3172 for line in lines:
3171 # We need to allow empty values.
3173 # We need to allow empty values.
3172 fields = line.lstrip().split(' ', 1)
3174 fields = line.lstrip().split(' ', 1)
3173 if len(fields) == 1:
3175 if len(fields) == 1:
3174 key = fields[0]
3176 key = fields[0]
3175 value = ''
3177 value = ''
3176 else:
3178 else:
3177 key, value = fields
3179 key, value = fields
3178
3180
3179 if value.startswith('eval:'):
3181 if value.startswith('eval:'):
3180 value = stringutil.evalpythonliteral(value[5:])
3182 value = stringutil.evalpythonliteral(value[5:])
3181 else:
3183 else:
3182 value = stringutil.unescapestr(value)
3184 value = stringutil.unescapestr(value)
3183
3185
3184 args[key] = value
3186 args[key] = value
3185
3187
3186 if batchedcommands is not None:
3188 if batchedcommands is not None:
3187 batchedcommands.append((command, args))
3189 batchedcommands.append((command, args))
3188 continue
3190 continue
3189
3191
3190 ui.status(_('sending %s command\n') % command)
3192 ui.status(_('sending %s command\n') % command)
3191
3193
3192 if 'PUSHFILE' in args:
3194 if 'PUSHFILE' in args:
3193 with open(args['PUSHFILE'], r'rb') as fh:
3195 with open(args['PUSHFILE'], r'rb') as fh:
3194 del args['PUSHFILE']
3196 del args['PUSHFILE']
3195 res, output = peer._callpush(command, fh,
3197 res, output = peer._callpush(command, fh,
3196 **pycompat.strkwargs(args))
3198 **pycompat.strkwargs(args))
3197 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3199 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3198 ui.status(_('remote output: %s\n') %
3200 ui.status(_('remote output: %s\n') %
3199 stringutil.escapestr(output))
3201 stringutil.escapestr(output))
3200 else:
3202 else:
3201 with peer.commandexecutor() as e:
3203 with peer.commandexecutor() as e:
3202 res = e.callcommand(command, args).result()
3204 res = e.callcommand(command, args).result()
3203
3205
3204 if isinstance(res, wireprotov2peer.commandresponse):
3206 if isinstance(res, wireprotov2peer.commandresponse):
3205 val = list(res.cborobjects())
3207 val = list(res.cborobjects())
3206 ui.status(_('response: %s\n') %
3208 ui.status(_('response: %s\n') %
3207 stringutil.pprint(val, bprefix=True))
3209 stringutil.pprint(val, bprefix=True))
3208
3210
3209 else:
3211 else:
3210 ui.status(_('response: %s\n') %
3212 ui.status(_('response: %s\n') %
3211 stringutil.pprint(res, bprefix=True))
3213 stringutil.pprint(res, bprefix=True))
3212
3214
3213 elif action == 'batchbegin':
3215 elif action == 'batchbegin':
3214 if batchedcommands is not None:
3216 if batchedcommands is not None:
3215 raise error.Abort(_('nested batchbegin not allowed'))
3217 raise error.Abort(_('nested batchbegin not allowed'))
3216
3218
3217 batchedcommands = []
3219 batchedcommands = []
3218 elif action == 'batchsubmit':
3220 elif action == 'batchsubmit':
3219 # There is a batching API we could go through. But it would be
3221 # There is a batching API we could go through. But it would be
3220 # difficult to normalize requests into function calls. It is easier
3222 # difficult to normalize requests into function calls. It is easier
3221 # to bypass this layer and normalize to commands + args.
3223 # to bypass this layer and normalize to commands + args.
3222 ui.status(_('sending batch with %d sub-commands\n') %
3224 ui.status(_('sending batch with %d sub-commands\n') %
3223 len(batchedcommands))
3225 len(batchedcommands))
3224 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3226 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3225 ui.status(_('response #%d: %s\n') %
3227 ui.status(_('response #%d: %s\n') %
3226 (i, stringutil.escapestr(chunk)))
3228 (i, stringutil.escapestr(chunk)))
3227
3229
3228 batchedcommands = None
3230 batchedcommands = None
3229
3231
3230 elif action.startswith('httprequest '):
3232 elif action.startswith('httprequest '):
3231 if not opener:
3233 if not opener:
3232 raise error.Abort(_('cannot use httprequest without an HTTP '
3234 raise error.Abort(_('cannot use httprequest without an HTTP '
3233 'peer'))
3235 'peer'))
3234
3236
3235 request = action.split(' ', 2)
3237 request = action.split(' ', 2)
3236 if len(request) != 3:
3238 if len(request) != 3:
3237 raise error.Abort(_('invalid httprequest: expected format is '
3239 raise error.Abort(_('invalid httprequest: expected format is '
3238 '"httprequest <method> <path>'))
3240 '"httprequest <method> <path>'))
3239
3241
3240 method, httppath = request[1:]
3242 method, httppath = request[1:]
3241 headers = {}
3243 headers = {}
3242 body = None
3244 body = None
3243 frames = []
3245 frames = []
3244 for line in lines:
3246 for line in lines:
3245 line = line.lstrip()
3247 line = line.lstrip()
3246 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3248 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3247 if m:
3249 if m:
3248 headers[m.group(1)] = m.group(2)
3250 headers[m.group(1)] = m.group(2)
3249 continue
3251 continue
3250
3252
3251 if line.startswith(b'BODYFILE '):
3253 if line.startswith(b'BODYFILE '):
3252 with open(line.split(b' ', 1), 'rb') as fh:
3254 with open(line.split(b' ', 1), 'rb') as fh:
3253 body = fh.read()
3255 body = fh.read()
3254 elif line.startswith(b'frame '):
3256 elif line.startswith(b'frame '):
3255 frame = wireprotoframing.makeframefromhumanstring(
3257 frame = wireprotoframing.makeframefromhumanstring(
3256 line[len(b'frame '):])
3258 line[len(b'frame '):])
3257
3259
3258 frames.append(frame)
3260 frames.append(frame)
3259 else:
3261 else:
3260 raise error.Abort(_('unknown argument to httprequest: %s') %
3262 raise error.Abort(_('unknown argument to httprequest: %s') %
3261 line)
3263 line)
3262
3264
3263 url = path + httppath
3265 url = path + httppath
3264
3266
3265 if frames:
3267 if frames:
3266 body = b''.join(bytes(f) for f in frames)
3268 body = b''.join(bytes(f) for f in frames)
3267
3269
3268 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3270 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3269
3271
3270 # urllib.Request insists on using has_data() as a proxy for
3272 # urllib.Request insists on using has_data() as a proxy for
3271 # determining the request method. Override that to use our
3273 # determining the request method. Override that to use our
3272 # explicitly requested method.
3274 # explicitly requested method.
3273 req.get_method = lambda: pycompat.sysstr(method)
3275 req.get_method = lambda: pycompat.sysstr(method)
3274
3276
3275 try:
3277 try:
3276 res = opener.open(req)
3278 res = opener.open(req)
3277 body = res.read()
3279 body = res.read()
3278 except util.urlerr.urlerror as e:
3280 except util.urlerr.urlerror as e:
3279 # read() method must be called, but only exists in Python 2
3281 # read() method must be called, but only exists in Python 2
3280 getattr(e, 'read', lambda: None)()
3282 getattr(e, 'read', lambda: None)()
3281 continue
3283 continue
3282
3284
3283 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3285 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3284 ui.write(_('cbor> %s\n') %
3286 ui.write(_('cbor> %s\n') %
3285 stringutil.pprint(cbor.loads(body), bprefix=True))
3287 stringutil.pprint(cbor.loads(body), bprefix=True))
3286
3288
3287 elif action == 'close':
3289 elif action == 'close':
3288 peer.close()
3290 peer.close()
3289 elif action == 'readavailable':
3291 elif action == 'readavailable':
3290 if not stdout or not stderr:
3292 if not stdout or not stderr:
3291 raise error.Abort(_('readavailable not available on this peer'))
3293 raise error.Abort(_('readavailable not available on this peer'))
3292
3294
3293 stdin.close()
3295 stdin.close()
3294 stdout.read()
3296 stdout.read()
3295 stderr.read()
3297 stderr.read()
3296
3298
3297 elif action == 'readline':
3299 elif action == 'readline':
3298 if not stdout:
3300 if not stdout:
3299 raise error.Abort(_('readline not available on this peer'))
3301 raise error.Abort(_('readline not available on this peer'))
3300 stdout.readline()
3302 stdout.readline()
3301 elif action == 'ereadline':
3303 elif action == 'ereadline':
3302 if not stderr:
3304 if not stderr:
3303 raise error.Abort(_('ereadline not available on this peer'))
3305 raise error.Abort(_('ereadline not available on this peer'))
3304 stderr.readline()
3306 stderr.readline()
3305 elif action.startswith('read '):
3307 elif action.startswith('read '):
3306 count = int(action.split(' ', 1)[1])
3308 count = int(action.split(' ', 1)[1])
3307 if not stdout:
3309 if not stdout:
3308 raise error.Abort(_('read not available on this peer'))
3310 raise error.Abort(_('read not available on this peer'))
3309 stdout.read(count)
3311 stdout.read(count)
3310 elif action.startswith('eread '):
3312 elif action.startswith('eread '):
3311 count = int(action.split(' ', 1)[1])
3313 count = int(action.split(' ', 1)[1])
3312 if not stderr:
3314 if not stderr:
3313 raise error.Abort(_('eread not available on this peer'))
3315 raise error.Abort(_('eread not available on this peer'))
3314 stderr.read(count)
3316 stderr.read(count)
3315 else:
3317 else:
3316 raise error.Abort(_('unknown action: %s') % action)
3318 raise error.Abort(_('unknown action: %s') % action)
3317
3319
3318 if batchedcommands is not None:
3320 if batchedcommands is not None:
3319 raise error.Abort(_('unclosed "batchbegin" request'))
3321 raise error.Abort(_('unclosed "batchbegin" request'))
3320
3322
3321 if peer:
3323 if peer:
3322 peer.close()
3324 peer.close()
3323
3325
3324 if proc:
3326 if proc:
3325 proc.kill()
3327 proc.kill()
@@ -1,271 +1,274 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """
8 """
9 Algorithm works in the following way. You have two repositories: local and
9 Algorithm works in the following way. You have two repositories: local and
10 remote. They both contain a DAG of changelists.
10 remote. They both contain a DAG of changelists.
11
11
12 The goal of the discovery protocol is to find one set of node *common*,
12 The goal of the discovery protocol is to find one set of node *common*,
13 the set of nodes shared by local and remote.
13 the set of nodes shared by local and remote.
14
14
15 One of the issues with the original protocol was latency: it could
15 One of the issues with the original protocol was latency: it could
16 potentially require lots of roundtrips to discover that the local repo was a
16 potentially require lots of roundtrips to discover that the local repo was a
17 subset of remote (which is a very common case, you usually have few changes
17 subset of remote (which is a very common case, you usually have few changes
18 compared to upstream, while upstream probably had lots of development).
18 compared to upstream, while upstream probably had lots of development).
19
19
20 The new protocol only requires one interface for the remote repo: `known()`,
20 The new protocol only requires one interface for the remote repo: `known()`,
21 which given a set of changelists tells you if they are present in the DAG.
21 which given a set of changelists tells you if they are present in the DAG.
22
22
23 The algorithm then works as follow:
23 The algorithm then works as follow:
24
24
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 all nodes are in `unknown`.
26 all nodes are in `unknown`.
27 - Take a sample from `unknown`, call `remote.known(sample)`
27 - Take a sample from `unknown`, call `remote.known(sample)`
28 - For each node that remote knows, move it and all its ancestors to `common`
28 - For each node that remote knows, move it and all its ancestors to `common`
29 - For each node that remote doesn't know, move it and all its descendants
29 - For each node that remote doesn't know, move it and all its descendants
30 to `missing`
30 to `missing`
31 - Iterate until `unknown` is empty
31 - Iterate until `unknown` is empty
32
32
33 There are a couple optimizations, first is instead of starting with a random
33 There are a couple optimizations, first is instead of starting with a random
34 sample of missing, start by sending all heads, in the case where the local
34 sample of missing, start by sending all heads, in the case where the local
35 repo is a subset, you computed the answer in one round trip.
35 repo is a subset, you computed the answer in one round trip.
36
36
37 Then you can do something similar to the bisecting strategy used when
37 Then you can do something similar to the bisecting strategy used when
38 finding faulty changesets. Instead of random samples, you can try picking
38 finding faulty changesets. Instead of random samples, you can try picking
39 nodes that will maximize the number of nodes that will be
39 nodes that will maximize the number of nodes that will be
40 classified with it (since all ancestors or descendants will be marked as well).
40 classified with it (since all ancestors or descendants will be marked as well).
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import collections
45 import collections
46 import random
46 import random
47
47
48 from .i18n import _
48 from .i18n import _
49 from .node import (
49 from .node import (
50 nullid,
50 nullid,
51 nullrev,
51 nullrev,
52 )
52 )
53 from . import (
53 from . import (
54 dagutil,
54 dagutil,
55 error,
55 error,
56 util,
56 util,
57 )
57 )
58
58
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 """update an existing sample to match the expected size
60 """update an existing sample to match the expected size
61
61
62 The sample is updated with nodes exponentially distant from each head of the
62 The sample is updated with nodes exponentially distant from each head of the
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64
64
65 If a target size is specified, the sampling will stop once this size is
65 If a target size is specified, the sampling will stop once this size is
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 reached.
67 reached.
68
68
69 :dag: a dag object from dagutil
69 :dag: a dag object from dagutil
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 :sample: a sample to update
71 :sample: a sample to update
72 :quicksamplesize: optional target size of the sample"""
72 :quicksamplesize: optional target size of the sample"""
73 # if nodes is empty we scan the entire graph
73 # if nodes is empty we scan the entire graph
74 if nodes:
74 if nodes:
75 heads = dag.headsetofconnecteds(nodes)
75 heads = dag.headsetofconnecteds(nodes)
76 else:
76 else:
77 heads = dag.heads()
77 heads = dag.heads()
78 dist = {}
78 dist = {}
79 visit = collections.deque(heads)
79 visit = collections.deque(heads)
80 seen = set()
80 seen = set()
81 factor = 1
81 factor = 1
82 while visit:
82 while visit:
83 curr = visit.popleft()
83 curr = visit.popleft()
84 if curr in seen:
84 if curr in seen:
85 continue
85 continue
86 d = dist.setdefault(curr, 1)
86 d = dist.setdefault(curr, 1)
87 if d > factor:
87 if d > factor:
88 factor *= 2
88 factor *= 2
89 if d == factor:
89 if d == factor:
90 sample.add(curr)
90 sample.add(curr)
91 if quicksamplesize and (len(sample) >= quicksamplesize):
91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 return
92 return
93 seen.add(curr)
93 seen.add(curr)
94 for p in dag.parents(curr):
94 for p in dag.parents(curr):
95 if not nodes or p in nodes:
95 if not nodes or p in nodes:
96 dist.setdefault(p, d + 1)
96 dist.setdefault(p, d + 1)
97 visit.append(p)
97 visit.append(p)
98
98
def _takequicksample(dag, nodes, size):
    """Take a cheap sample of at most ``size`` nodes.

    Meant for the initial discovery round: it favors the heads of the
    set and close ancestors of those heads.

    :dag: a dag object
    :nodes: set of nodes to discover
    :size: the maximum size of the sample"""
    headsample = dag.headsetofconnecteds(nodes)
    # If the heads alone already fill the budget, just trim them.
    if len(headsample) >= size:
        return _limitsample(headsample, size)
    # Otherwise pad with nodes exponentially close to the heads,
    # scanning the whole graph, until the target size is reached.
    _updatesample(dag, None, headsample, quicksamplesize=size)
    return headsample
113
113
def _takefullsample(dag, nodes, size):
    """Take a sample of at most ``size`` nodes spread across ``nodes``.

    The sample mixes heads, roots, and exponentially spaced nodes walked
    from both directions, then is padded with random undecided nodes up
    to ``size``."""
    fullsample = dag.headsetofconnecteds(nodes)
    # Walk down from the heads of the undecided set...
    _updatesample(dag, nodes, fullsample)
    # ...and up from its roots, by sampling the inverted dag.
    _updatesample(dag.inverse(), nodes, fullsample)
    assert fullsample
    fullsample = _limitsample(fullsample, size)
    shortfall = size - len(fullsample)
    if shortfall > 0:
        # Top up with random undecided nodes not yet in the sample.
        fullsample.update(random.sample(list(nodes - fullsample), shortfall))
    return fullsample
126
126
127 def _limitsample(sample, desiredlen):
127 def _limitsample(sample, desiredlen):
128 """return a random subset of sample of at most desiredlen item"""
128 """return a random subset of sample of at most desiredlen item"""
129 if len(sample) > desiredlen:
129 if len(sample) > desiredlen:
130 sample = set(random.sample(sample, desiredlen))
130 sample = set(random.sample(sample, desiredlen))
131 return sample
131 return sample
132
132
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True,
                    ancestorsof=None):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    ``local`` is the local repository; ``remote`` is a peer offering the
    ``heads`` and ``known`` wire commands.  ``initialsamplesize`` and
    ``fullsamplesize`` bound how many nodes are queried per round trip.
    When ``abortwhenunrelated`` is True, error.Abort is raised if the two
    repositories turn out to share no common node.  ``ancestorsof``
    presumably restricts discovery to ancestors of those nodes — confirm
    against dagutil.revlogdag's ``localsubset`` handling.
    '''
    start = util.timer()

    roundtrips = 0
    cl = local.changelog
    # rev -> node converter; used instead of dagutil externalization when
    # sending revisions over the wire.
    clnode = cl.node
    localsubset = None

    if ancestorsof is not None:
        rev = local.changelog.rev
        localsubset = [rev(n) for n in ancestorsof]
    dag = dagutil.revlogdag(cl, localsubset=localsubset)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = _limitsample(ownheads, initialsamplesize)
    # indices between sample and externalized version must match
    sample = list(sample)

    # Batch the two initial commands into one round trip.
    with remote.commandexecutor() as e:
        fheads = e.callcommand('heads', {})
        fknown = e.callcommand('known', {
            'nodes': [clnode(r) for r in sample],
        })

    srvheadhashes, yesno = fheads.result(), fknown.result()

    # Empty local repository: everything the server has is incoming.
    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    srvheads = dag.internalizeall(srvheadhashes, filterunknown=True)
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return srvheadhashes, False, srvheadhashes

    if len(sample) == len(ownheads) and all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = [clnode(r) for r in ownheads]
        return ownheadhashes, True, srvheadhashes

    # full blown discovery

    # own nodes I know we both know
    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common = cl.incrementalmissingrevs(srvheads)
    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
    common.addbases(commoninsample)
    # own nodes where I don't know if remote knows them
    undecided = set(common.missingancestors(ownheads))
    # own nodes I know remote lacks
    missing = set()

    full = False
    progress = ui.makeprogress(_('searching'), unit=_('queries'))
    while undecided:

        if sample:
            # Everything the server answered "no" to, plus its
            # descendants, is definitely missing remotely.
            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

        undecided.difference_update(missing)

        if not undecided:
            break

        # Quick sampling is only used for the very first query (before
        # any common base is known); afterwards sample both directions.
        if full or common.hasbases():
            if full:
                ui.note(_("sampling from both directions\n"))
            else:
                ui.debug("taking initial sample\n")
            samplefunc = _takefullsample
            targetsize = fullsamplesize
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            samplefunc = _takequicksample
            targetsize = initialsamplesize
        if len(undecided) < targetsize:
            sample = list(undecided)
        else:
            sample = samplefunc(dag, undecided, targetsize)

        roundtrips += 1
        progress.update(roundtrips)
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)

        with remote.commandexecutor() as e:
            yesno = e.callcommand('known', {
                'nodes': [clnode(r) for r in sample],
            }).result()

        full = True

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.addbases(commoninsample)
            common.removeancestorsfrom(undecided)

    # heads(common) == heads(common.bases) since common represents common.bases
    # and all its ancestors
    result = dag.headsetofconnecteds(common.bases)
    # common.bases can include nullrev, but our contract requires us to not
    # return any heads in that case, so discard that
    result.discard(nullrev)
    elapsed = util.timer() - start
    progress.complete()
    ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
    msg = ('found %d common and %d unknown server heads,'
           ' %d roundtrips in %.4fs\n')
    missing = set(result) - set(srvheads)
    ui.log('discovery', msg, len(result), len(missing), roundtrips,
           elapsed)

    if not result and srvheadhashes != [nullid]:
        if abortwhenunrelated:
            raise error.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return ({nullid}, True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    # Convert common head revs back to node hashes for the caller.
    result = {clnode(r) for r in result}
    return result, anyincoming, srvheadhashes
General Comments 0
You need to be logged in to leave comments. Login now