##// END OF EJS Templates
setdiscovery: don't use dagutil for node -> rev conversion...
Gregory Szorc -
r39197:858a1284 default
parent child Browse files
Show More
@@ -1,3327 +1,3327 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 filesetlang,
51 filesetlang,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
92 release = lockmod.release
92 release = lockmod.release
93
93
94 command = registrar.command()
94 command = registrar.command()
95
95
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Two calling conventions: INDEX REV1 REV2 opens an explicit revlog
    # index file; REV1 REV2 uses the current repository's changelog.
    nargs = len(args)
    if nargs not in (2, 3):
        raise error.Abort(_('either two or three arguments required'))
    if nargs == 3:
        index, rev1, rev2 = args
        rl = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rl.lookup
    else:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rl = repo.changelog
        lookup = repo.lookup
    anc = rl.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rl.rev(anc), hex(anc)))
114
114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the named path, decode the bundle header, and replay the
    # stream directly onto the local repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
121
121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # Building starts from revision 0, so refuse to run on a non-empty repo.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    # (first parse pass: only used to size the progress bar and, for
    # --mergeable-file, the initial file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # Second parse pass: actually commit one changeset per 'n' event,
    # holding both locks and a single transaction for the whole build.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1               # rev number of the last node committed
        atbranch = 'default'  # branch applied to subsequently created nodes
        nodeids = []          # maps rev number -> committed node id
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: 3-way merge "mf" from both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        # first node: seed from the precomputed lines
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale at every revision
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per revision, plus carry over the
                    # second parent's "nf*" files on merges
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve parent backrefs to node ids (None for null)
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # ":tag" event: remember a local tag for the preceding node
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # "@branch" event: switch branch for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
269
269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup 'gen'.

    With 'all' set, every delta of every section (changelog, manifest,
    filelogs) is shown with its full metadata; otherwise only changelog
    node hashes are listed.  'indent' prefixes each output line (used when
    nested inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # one deltaiter() pass prints the current section; the
            # underlying stream advances, so sections must be consumed
            # in order: changelog, manifest, then each filelog.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelogheader() returns {} after the last filelog section
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # terse mode: changelog node ids only
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298
298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown encoding version: report it instead of aborting, so the
        # rest of the bundle can still be inspected
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # render each marker through the standard formatter machinery
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
321
321
def _debugphaseheads(ui, data, indent=0):
    """display phase heads decoded from binary phase data"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
330
330
def _quasirepr(thing):
    """Return a stable, repr-like rendering of 'thing'.

    Mappings are printed with sorted keys so output does not depend on
    insertion or hash order; everything else falls back to repr().
    """
    maptypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, maptypes):
        return pycompat.bytestr(repr(thing))
    pairs = [b'%s: %s' % (key, thing[key]) for key in sorted(thing)]
    return '{%s}' % b', '.join(pairs)
336
336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # --part-type restricts output to the named part types (empty = all)
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # known payload types get a detailed, indented dump unless --quiet
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
359
359
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        # --spec: print only the bundle's spec string and stop
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, fh))
            return

        # dispatch on container format: bundle2 vs plain changegroup
        gen = exchange.readbundle(ui, fh, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
378
378
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # main (wire protocol v1) capabilities first
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write(('  %s\n') % cap)
    # then bundle2 capabilities, if the peer advertises any
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capname, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % capname)
            for value in values:
                ui.write(('    %s\n') % value)
397
397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # cross-check every dirstate entry against the parent manifests
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse direction: every manifest1 file must be tracked
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: must not name this local 'error' -- that would shadow the
        # 'error' module and make error.Abort an AttributeError on a string
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425
425
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured style labels; default lists raw colors
    show = _debugdisplaystyle if opts.get(r'style') else _debugdisplaycolor
    return show(ui)
436
436
def _debugdisplaycolor(ui):
    """List every available color/effect name, each rendered in itself."""
    # Operate on a copy: we clobber _styles so each entry labels itself.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, dummy in ui.configitems('color'):
            if name.startswith('color.'):
                ui._styles[name] = name[6:]
            elif name.startswith('terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
454
454
def _debugdisplaystyle(ui):
    """List configured style labels with the effects each maps to."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad labels so the effect columns line up
    width = max(len(s) for s in ui._styles)
    for name, effects in sorted(ui._styles.items()):
        ui.write('%s' % name, label=name)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(name))))
            rendered = [ui.label(e, e) for e in effects.split()]
            ui.write(', '.join(rendered))
        ui.write('\n')
468
468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
486
486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: emit its DAG, labeling any revs the
        # user listed on the command line as "rN"
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yields ('n', (rev, parents)) per node, plus ('l', ...) labels
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: walk the current repository's changelog
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # yields 'a' branch annotations, 'n' nodes, and 'l' tag labels
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        # emit an annotation only when the branch changes
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549
549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # With -c/-m/--dir the revlog is implied, so the sole positional
    # argument is the revision, not a file.
    wholelog = any(opts.get(flag) for flag in ('changelog', 'manifest', 'dir'))
    if wholelog:
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565
565
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended additionally tries the less common date formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # When a second argument is given, report whether the parsed date
        # falls inside that range.
        matchfn = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matchfn(parsed[0]))
581
581
@command('debugdeltachain',
         cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Summarize one revision: (compsize, uncompsize, deltatype,
        # chain, chainsize). Index tuple fields used below: e[1] is the
        # compressed size, e[2] the uncompressed size, e[3] the delta base
        # rev, e[5]/e[6] the parent revs (NOTE(review): inferred from the
        # comparisons below -- confirm against the revlog index format).
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the base can be any earlier revision;
            # classify it relative to the parents / previous rev.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta a delta is either a full snapshot
            # (its own base) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # Total compressed size of every link in this revision's chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Number chains by first-seen base so 'chainid' is stable and compact.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # On-disk span from the chain base to the end of this revision.
        lineardist = revstart + comp - basestart
        # Bytes in that span that belong to other chains (seek overhead).
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length one: this revision is its own base.
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate a sparse read of the chain: sum the sliced block
            # sizes and track the largest single block.
            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
733
733
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    if datesort:
        # sort by mtime, then by filename
        keyfunc = lambda item: (item[1][3], item[0])
    else:
        keyfunc = None  # plain filename sort

    for path, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        mtime = ent[3]
        # The literal placeholders below are padded to the width of the
        # strftime output so the columns stay aligned.
        if mtime == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(mtime))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765
765
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            # Legacy tree-walking discovery protocol.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
            cl = repo.changelog
            clnode = cl.node
            dag = dagutil.revlogdag(cl)
            # Prune 'common' down to the heads of its ancestor closure:
            # map nodes to revs, take the full ancestor set, keep only
            # the head revs of that connected set, then map back to nodes.
            # NOTE(review): 'all' shadows the builtin in this scope.
            all = dag.ancestorset(cl.rev(n) for n in common)
            common = {clnode(r) for r in dag.headsetofconnecteds(all)}
        else:
            # Modern set-based discovery, optionally restricted by --rev.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # NOTE(review): 'any' shadows the builtin here; harmless in
            # this scope but worth renaming.
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
819
819
# Buffer/chunk size (4 KiB) used by debugdownload when streaming data.
_chunksize = 4 << 10
821
821
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource at ``url`` is streamed in ``_chunksize`` pieces either to
    the file named by --output or, by default, to the ui.
    """
    fh = urlmod.open(ui, url, output)
    # Fix: the handle returned by urlmod.open was previously never closed,
    # leaking the underlying connection/file (and leaking it outright if
    # opening the output file raised). Close it unconditionally.
    try:
        dest = ui
        if output:
            dest = open(output, "wb", _chunksize)
        try:
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        fh.close()
843
843
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # Quiet/verbose: bare name per line; details follow in verbose.
            fm.write('name', '%s\n', extname)
        else:
            # Default output: name annotated with compatibility status
            # against the running Mercurial version.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
889
889
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # Compilation pipeline for the fileset expression; each stage can be
    # dumped with -p/--show-stage.
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, printing the tree after each requested stage.
    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # Collect the candidate file names to run the matcher over: all
    # revisions with --all-files, otherwise the selected revision (or the
    # working directory when no revision was given).
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
956
956
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Name column is wide enough for the longest variant name or the header.
    namewidth = max(len('format-variant'),
                    max(len(fv.name) for fv in upgrade.allformatvariant))

    def headerfmt(name):
        # '%s:' followed by enough padding to align the value columns.
        return '%s:' + (' ' * (namewidth - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Leave strings alone; render booleans as yes/no in plain mode.
            if util.safehasattr(value, 'startswith'):
                return value
            return 'yes' if value else 'no'
    else:
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear with --verbose.
    fm.plain('format-variant')
    fm.plain(' ' * (namewidth - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels that highlight repo/config/default disagreements.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', headerfmt(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1018
1018
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(flag):
        # same truth mapping as the historical "flag and 'yes' or 'no'" idiom
        return 'yes' if flag else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    # probe case sensitivity with a scratch file; an unwritable or missing
    # path simply leaves the answer as '(unknown)'
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1035
1035
@command('debuggetbundle',
         [('H', 'head', [], _('id of head node'), _('ID')),
          ('C', 'common', [], _('id of common node'), _('ID')),
          ('t', 'type', 'bzip2', _('bundle compression type to use'),
           _('TYPE'))],
         _('REPO FILE [-H|-C ID]...'),
         norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # build keyword arguments for the peer's getbundle() call; keys use r''
    # literals because they are expanded via **args below
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    # translate the user-facing --type value into the internal bundle
    # type identifier understood by bundle2.writebundle()
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1070
1070
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                # check the file itself first, then walk up its parent
                # directories looking for a matching ignore rule
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    # matched via a parent directory rather than directly
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1112
1112
@command('debugindex', cmdutil.debugrevlogopts +
         [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
         _('[-f FORMAT] -c|-m|FILE'),
         optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full 40-char hashes with --debug, short forms otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # emit a column header matching the chosen format/verbosity
    # NOTE(review): header column spacing may have been collapsed in this
    # copy of the file -- verify alignment against upstream before relying
    # on it
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the lookup fails
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            # format 1 reports parents as revision numbers, not hashes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1177
1177
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # one "parent -> child" edge per parent link; the second parent is
    # only drawn when it is not the null node
    for rev in rlog:
        parents = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(parents[1]), rev))
    ui.write("}\n")
1192
1192
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # write *contents* to a fresh temp file and return its path
    # NOTE(review): not referenced anywhere in this function body -- possibly
    # left over from an earlier revision; confirm before removing
    def writetemp(contents):
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # count of hard failures; warnings below deliberately do not touch it
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # the C extensions are expected here; a failed import is a problem
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered vs actually usable vs wire-capable
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines '
                              '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                # clearing p marks the template install as broken below
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    # a missing 'vi' (the fallback default) is only a warning; a missing
    # explicitly-configured editor counts as a problem
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1366
1366
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # one '1'/'0' per queried node, in input order
    flags = peer.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join(["1" if f else "0" for f in flags])))
1380
1380
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # thin shim: simply forwards to the command that replaced it
    debugnamecomplete(ui, repo, *args)
1385
1385
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # forcibly free locks by deleting the lock files, then bail out
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # acquire the requested lock(s) non-blockingly and hold them until the
    # user answers the prompt (or the process is interrupted)
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # reporting mode: describe who (if anyone) holds each lock
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # we acquired it ourselves, so nobody else holds it; release
            # immediately and report it as free below
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    # lock contents are "host:pid"; only mention the host
                    # when it differs from the local machine
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1482
1482
1483 @command('debugmanifestfulltextcache', [
1483 @command('debugmanifestfulltextcache', [
1484 ('', 'clear', False, _('clear the cache')),
1484 ('', 'clear', False, _('clear the cache')),
1485 ('a', 'add', '', _('add the given manifest node to the cache'),
1485 ('a', 'add', '', _('add the given manifest node to the cache'),
1486 _('NODE'))
1486 _('NODE'))
1487 ], '')
1487 ], '')
1488 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1488 def debugmanifestfulltextcache(ui, repo, add=None, **opts):
1489 """show, clear or amend the contents of the manifest fulltext cache"""
1489 """show, clear or amend the contents of the manifest fulltext cache"""
1490 with repo.lock():
1490 with repo.lock():
1491 r = repo.manifestlog._revlog
1491 r = repo.manifestlog._revlog
1492 try:
1492 try:
1493 cache = r._fulltextcache
1493 cache = r._fulltextcache
1494 except AttributeError:
1494 except AttributeError:
1495 ui.warn(_(
1495 ui.warn(_(
1496 "Current revlog implementation doesn't appear to have a "
1496 "Current revlog implementation doesn't appear to have a "
1497 'manifest fulltext cache\n'))
1497 'manifest fulltext cache\n'))
1498 return
1498 return
1499
1499
1500 if opts.get(r'clear'):
1500 if opts.get(r'clear'):
1501 cache.clear()
1501 cache.clear()
1502
1502
1503 if add:
1503 if add:
1504 try:
1504 try:
1505 manifest = repo.manifestlog[r.lookup(add)]
1505 manifest = repo.manifestlog[r.lookup(add)]
1506 except error.LookupError as e:
1506 except error.LookupError as e:
1507 raise error.Abort(e, hint="Check your manifest node id")
1507 raise error.Abort(e, hint="Check your manifest node id")
1508 manifest.read() # stores revisision in cache too
1508 manifest.read() # stores revisision in cache too
1509
1509
1510 if not len(cache):
1510 if not len(cache):
1511 ui.write(_('Cache empty'))
1511 ui.write(_('Cache empty'))
1512 else:
1512 else:
1513 ui.write(
1513 ui.write(
1514 _('Cache contains %d manifest entries, in order of most to '
1514 _('Cache contains %d manifest entries, in order of most to '
1515 'least recent:\n') % (len(cache),))
1515 'least recent:\n') % (len(cache),))
1516 totalsize = 0
1516 totalsize = 0
1517 for nodeid in cache:
1517 for nodeid in cache:
1518 # Use cache.get to not update the LRU order
1518 # Use cache.get to not update the LRU order
1519 data = cache.get(nodeid)
1519 data = cache.get(nodeid)
1520 size = len(data)
1520 size = len(data)
1521 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1521 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1522 ui.write(_('id: %s, size %s\n') % (
1522 ui.write(_('id: %s, size %s\n') % (
1523 hex(nodeid), util.bytecount(size)))
1523 hex(nodeid), util.bytecount(size)))
1524 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1524 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1525 ui.write(
1525 ui.write(
1526 _('Total cache data size %s, on-disk %s\n') % (
1526 _('Total cache data size %s, on-disk %s\n') % (
1527 util.bytecount(totalsize), util.bytecount(ondisk))
1527 util.bytecount(totalsize), util.bytecount(ondisk))
1528 )
1528 )
1529
1529
1530 @command('debugmergestate', [], '')
1530 @command('debugmergestate', [], '')
1531 def debugmergestate(ui, repo, *args):
1531 def debugmergestate(ui, repo, *args):
1532 """print merge state
1532 """print merge state
1533
1533
1534 Use --verbose to print out information about whether v1 or v2 merge state
1534 Use --verbose to print out information about whether v1 or v2 merge state
1535 was chosen."""
1535 was chosen."""
1536 def _hashornull(h):
1536 def _hashornull(h):
1537 if h == nullhex:
1537 if h == nullhex:
1538 return 'null'
1538 return 'null'
1539 else:
1539 else:
1540 return h
1540 return h
1541
1541
1542 def printrecords(version):
1542 def printrecords(version):
1543 ui.write(('* version %d records\n') % version)
1543 ui.write(('* version %d records\n') % version)
1544 if version == 1:
1544 if version == 1:
1545 records = v1records
1545 records = v1records
1546 else:
1546 else:
1547 records = v2records
1547 records = v2records
1548
1548
1549 for rtype, record in records:
1549 for rtype, record in records:
1550 # pretty print some record types
1550 # pretty print some record types
1551 if rtype == 'L':
1551 if rtype == 'L':
1552 ui.write(('local: %s\n') % record)
1552 ui.write(('local: %s\n') % record)
1553 elif rtype == 'O':
1553 elif rtype == 'O':
1554 ui.write(('other: %s\n') % record)
1554 ui.write(('other: %s\n') % record)
1555 elif rtype == 'm':
1555 elif rtype == 'm':
1556 driver, mdstate = record.split('\0', 1)
1556 driver, mdstate = record.split('\0', 1)
1557 ui.write(('merge driver: %s (state "%s")\n')
1557 ui.write(('merge driver: %s (state "%s")\n')
1558 % (driver, mdstate))
1558 % (driver, mdstate))
1559 elif rtype in 'FDC':
1559 elif rtype in 'FDC':
1560 r = record.split('\0')
1560 r = record.split('\0')
1561 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1561 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1562 if version == 1:
1562 if version == 1:
1563 onode = 'not stored in v1 format'
1563 onode = 'not stored in v1 format'
1564 flags = r[7]
1564 flags = r[7]
1565 else:
1565 else:
1566 onode, flags = r[7:9]
1566 onode, flags = r[7:9]
1567 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1567 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1568 % (f, rtype, state, _hashornull(hash)))
1568 % (f, rtype, state, _hashornull(hash)))
1569 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1569 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1570 ui.write((' ancestor path: %s (node %s)\n')
1570 ui.write((' ancestor path: %s (node %s)\n')
1571 % (afile, _hashornull(anode)))
1571 % (afile, _hashornull(anode)))
1572 ui.write((' other path: %s (node %s)\n')
1572 ui.write((' other path: %s (node %s)\n')
1573 % (ofile, _hashornull(onode)))
1573 % (ofile, _hashornull(onode)))
1574 elif rtype == 'f':
1574 elif rtype == 'f':
1575 filename, rawextras = record.split('\0', 1)
1575 filename, rawextras = record.split('\0', 1)
1576 extras = rawextras.split('\0')
1576 extras = rawextras.split('\0')
1577 i = 0
1577 i = 0
1578 extrastrings = []
1578 extrastrings = []
1579 while i < len(extras):
1579 while i < len(extras):
1580 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1580 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1581 i += 2
1581 i += 2
1582
1582
1583 ui.write(('file extras: %s (%s)\n')
1583 ui.write(('file extras: %s (%s)\n')
1584 % (filename, ', '.join(extrastrings)))
1584 % (filename, ', '.join(extrastrings)))
1585 elif rtype == 'l':
1585 elif rtype == 'l':
1586 labels = record.split('\0', 2)
1586 labels = record.split('\0', 2)
1587 labels = [l for l in labels if len(l) > 0]
1587 labels = [l for l in labels if len(l) > 0]
1588 ui.write(('labels:\n'))
1588 ui.write(('labels:\n'))
1589 ui.write((' local: %s\n' % labels[0]))
1589 ui.write((' local: %s\n' % labels[0]))
1590 ui.write((' other: %s\n' % labels[1]))
1590 ui.write((' other: %s\n' % labels[1]))
1591 if len(labels) > 2:
1591 if len(labels) > 2:
1592 ui.write((' base: %s\n' % labels[2]))
1592 ui.write((' base: %s\n' % labels[2]))
1593 else:
1593 else:
1594 ui.write(('unrecognized entry: %s\t%s\n')
1594 ui.write(('unrecognized entry: %s\t%s\n')
1595 % (rtype, record.replace('\0', '\t')))
1595 % (rtype, record.replace('\0', '\t')))
1596
1596
1597 # Avoid mergestate.read() since it may raise an exception for unsupported
1597 # Avoid mergestate.read() since it may raise an exception for unsupported
1598 # merge state records. We shouldn't be doing this, but this is OK since this
1598 # merge state records. We shouldn't be doing this, but this is OK since this
1599 # command is pretty low-level.
1599 # command is pretty low-level.
1600 ms = mergemod.mergestate(repo)
1600 ms = mergemod.mergestate(repo)
1601
1601
1602 # sort so that reasonable information is on top
1602 # sort so that reasonable information is on top
1603 v1records = ms._readrecordsv1()
1603 v1records = ms._readrecordsv1()
1604 v2records = ms._readrecordsv2()
1604 v2records = ms._readrecordsv2()
1605 order = 'LOml'
1605 order = 'LOml'
1606 def key(r):
1606 def key(r):
1607 idx = order.find(r[0])
1607 idx = order.find(r[0])
1608 if idx == -1:
1608 if idx == -1:
1609 return (1, r[1])
1609 return (1, r[1])
1610 else:
1610 else:
1611 return (0, idx)
1611 return (0, idx)
1612 v1records.sort(key=key)
1612 v1records.sort(key=key)
1613 v2records.sort(key=key)
1613 v2records.sort(key=key)
1614
1614
1615 if not v1records and not v2records:
1615 if not v1records and not v2records:
1616 ui.write(('no merge state found\n'))
1616 ui.write(('no merge state found\n'))
1617 elif not v2records:
1617 elif not v2records:
1618 ui.note(('no version 2 merge state\n'))
1618 ui.note(('no version 2 merge state\n'))
1619 printrecords(1)
1619 printrecords(1)
1620 elif ms._v1v2match(v1records, v2records):
1620 elif ms._v1v2match(v1records, v2records):
1621 ui.note(('v1 and v2 states match: using v2\n'))
1621 ui.note(('v1 and v2 states match: using v2\n'))
1622 printrecords(2)
1622 printrecords(2)
1623 else:
1623 else:
1624 ui.note(('v1 and v2 states mismatch: using v1\n'))
1624 ui.note(('v1 and v2 states mismatch: using v1\n'))
1625 printrecords(1)
1625 printrecords(1)
1626 if ui.verbose:
1626 if ui.verbose:
1627 printrecords(2)
1627 printrecords(2)
1628
1628
1629 @command('debugnamecomplete', [], _('NAME...'))
1629 @command('debugnamecomplete', [], _('NAME...'))
1630 def debugnamecomplete(ui, repo, *args):
1630 def debugnamecomplete(ui, repo, *args):
1631 '''complete "names" - tags, open branch names, bookmark names'''
1631 '''complete "names" - tags, open branch names, bookmark names'''
1632
1632
1633 names = set()
1633 names = set()
1634 # since we previously only listed open branches, we will handle that
1634 # since we previously only listed open branches, we will handle that
1635 # specially (after this for loop)
1635 # specially (after this for loop)
1636 for name, ns in repo.names.iteritems():
1636 for name, ns in repo.names.iteritems():
1637 if name != 'branches':
1637 if name != 'branches':
1638 names.update(ns.listnames(repo))
1638 names.update(ns.listnames(repo))
1639 names.update(tag for (tag, heads, tip, closed)
1639 names.update(tag for (tag, heads, tip, closed)
1640 in repo.branchmap().iterbranches() if not closed)
1640 in repo.branchmap().iterbranches() if not closed)
1641 completions = set()
1641 completions = set()
1642 if not args:
1642 if not args:
1643 args = ['']
1643 args = ['']
1644 for a in args:
1644 for a in args:
1645 completions.update(n for n in names if n.startswith(a))
1645 completions.update(n for n in names if n.startswith(a))
1646 ui.write('\n'.join(sorted(completions)))
1646 ui.write('\n'.join(sorted(completions)))
1647 ui.write('\n')
1647 ui.write('\n')
1648
1648
1649 @command('debugobsolete',
1649 @command('debugobsolete',
1650 [('', 'flags', 0, _('markers flag')),
1650 [('', 'flags', 0, _('markers flag')),
1651 ('', 'record-parents', False,
1651 ('', 'record-parents', False,
1652 _('record parent information for the precursor')),
1652 _('record parent information for the precursor')),
1653 ('r', 'rev', [], _('display markers relevant to REV')),
1653 ('r', 'rev', [], _('display markers relevant to REV')),
1654 ('', 'exclusive', False, _('restrict display to markers only '
1654 ('', 'exclusive', False, _('restrict display to markers only '
1655 'relevant to REV')),
1655 'relevant to REV')),
1656 ('', 'index', False, _('display index of the marker')),
1656 ('', 'index', False, _('display index of the marker')),
1657 ('', 'delete', [], _('delete markers specified by indices')),
1657 ('', 'delete', [], _('delete markers specified by indices')),
1658 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1658 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1659 _('[OBSOLETED [REPLACEMENT ...]]'))
1659 _('[OBSOLETED [REPLACEMENT ...]]'))
1660 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1660 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1661 """create arbitrary obsolete marker
1661 """create arbitrary obsolete marker
1662
1662
1663 With no arguments, displays the list of obsolescence markers."""
1663 With no arguments, displays the list of obsolescence markers."""
1664
1664
1665 opts = pycompat.byteskwargs(opts)
1665 opts = pycompat.byteskwargs(opts)
1666
1666
1667 def parsenodeid(s):
1667 def parsenodeid(s):
1668 try:
1668 try:
1669 # We do not use revsingle/revrange functions here to accept
1669 # We do not use revsingle/revrange functions here to accept
1670 # arbitrary node identifiers, possibly not present in the
1670 # arbitrary node identifiers, possibly not present in the
1671 # local repository.
1671 # local repository.
1672 n = bin(s)
1672 n = bin(s)
1673 if len(n) != len(nullid):
1673 if len(n) != len(nullid):
1674 raise TypeError()
1674 raise TypeError()
1675 return n
1675 return n
1676 except TypeError:
1676 except TypeError:
1677 raise error.Abort('changeset references must be full hexadecimal '
1677 raise error.Abort('changeset references must be full hexadecimal '
1678 'node identifiers')
1678 'node identifiers')
1679
1679
1680 if opts.get('delete'):
1680 if opts.get('delete'):
1681 indices = []
1681 indices = []
1682 for v in opts.get('delete'):
1682 for v in opts.get('delete'):
1683 try:
1683 try:
1684 indices.append(int(v))
1684 indices.append(int(v))
1685 except ValueError:
1685 except ValueError:
1686 raise error.Abort(_('invalid index value: %r') % v,
1686 raise error.Abort(_('invalid index value: %r') % v,
1687 hint=_('use integers for indices'))
1687 hint=_('use integers for indices'))
1688
1688
1689 if repo.currenttransaction():
1689 if repo.currenttransaction():
1690 raise error.Abort(_('cannot delete obsmarkers in the middle '
1690 raise error.Abort(_('cannot delete obsmarkers in the middle '
1691 'of transaction.'))
1691 'of transaction.'))
1692
1692
1693 with repo.lock():
1693 with repo.lock():
1694 n = repair.deleteobsmarkers(repo.obsstore, indices)
1694 n = repair.deleteobsmarkers(repo.obsstore, indices)
1695 ui.write(_('deleted %i obsolescence markers\n') % n)
1695 ui.write(_('deleted %i obsolescence markers\n') % n)
1696
1696
1697 return
1697 return
1698
1698
1699 if precursor is not None:
1699 if precursor is not None:
1700 if opts['rev']:
1700 if opts['rev']:
1701 raise error.Abort('cannot select revision when creating marker')
1701 raise error.Abort('cannot select revision when creating marker')
1702 metadata = {}
1702 metadata = {}
1703 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1703 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1704 succs = tuple(parsenodeid(succ) for succ in successors)
1704 succs = tuple(parsenodeid(succ) for succ in successors)
1705 l = repo.lock()
1705 l = repo.lock()
1706 try:
1706 try:
1707 tr = repo.transaction('debugobsolete')
1707 tr = repo.transaction('debugobsolete')
1708 try:
1708 try:
1709 date = opts.get('date')
1709 date = opts.get('date')
1710 if date:
1710 if date:
1711 date = dateutil.parsedate(date)
1711 date = dateutil.parsedate(date)
1712 else:
1712 else:
1713 date = None
1713 date = None
1714 prec = parsenodeid(precursor)
1714 prec = parsenodeid(precursor)
1715 parents = None
1715 parents = None
1716 if opts['record_parents']:
1716 if opts['record_parents']:
1717 if prec not in repo.unfiltered():
1717 if prec not in repo.unfiltered():
1718 raise error.Abort('cannot used --record-parents on '
1718 raise error.Abort('cannot used --record-parents on '
1719 'unknown changesets')
1719 'unknown changesets')
1720 parents = repo.unfiltered()[prec].parents()
1720 parents = repo.unfiltered()[prec].parents()
1721 parents = tuple(p.node() for p in parents)
1721 parents = tuple(p.node() for p in parents)
1722 repo.obsstore.create(tr, prec, succs, opts['flags'],
1722 repo.obsstore.create(tr, prec, succs, opts['flags'],
1723 parents=parents, date=date,
1723 parents=parents, date=date,
1724 metadata=metadata, ui=ui)
1724 metadata=metadata, ui=ui)
1725 tr.close()
1725 tr.close()
1726 except ValueError as exc:
1726 except ValueError as exc:
1727 raise error.Abort(_('bad obsmarker input: %s') %
1727 raise error.Abort(_('bad obsmarker input: %s') %
1728 pycompat.bytestr(exc))
1728 pycompat.bytestr(exc))
1729 finally:
1729 finally:
1730 tr.release()
1730 tr.release()
1731 finally:
1731 finally:
1732 l.release()
1732 l.release()
1733 else:
1733 else:
1734 if opts['rev']:
1734 if opts['rev']:
1735 revs = scmutil.revrange(repo, opts['rev'])
1735 revs = scmutil.revrange(repo, opts['rev'])
1736 nodes = [repo[r].node() for r in revs]
1736 nodes = [repo[r].node() for r in revs]
1737 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1737 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1738 exclusive=opts['exclusive']))
1738 exclusive=opts['exclusive']))
1739 markers.sort(key=lambda x: x._data)
1739 markers.sort(key=lambda x: x._data)
1740 else:
1740 else:
1741 markers = obsutil.getmarkers(repo)
1741 markers = obsutil.getmarkers(repo)
1742
1742
1743 markerstoiter = markers
1743 markerstoiter = markers
1744 isrelevant = lambda m: True
1744 isrelevant = lambda m: True
1745 if opts.get('rev') and opts.get('index'):
1745 if opts.get('rev') and opts.get('index'):
1746 markerstoiter = obsutil.getmarkers(repo)
1746 markerstoiter = obsutil.getmarkers(repo)
1747 markerset = set(markers)
1747 markerset = set(markers)
1748 isrelevant = lambda m: m in markerset
1748 isrelevant = lambda m: m in markerset
1749
1749
1750 fm = ui.formatter('debugobsolete', opts)
1750 fm = ui.formatter('debugobsolete', opts)
1751 for i, m in enumerate(markerstoiter):
1751 for i, m in enumerate(markerstoiter):
1752 if not isrelevant(m):
1752 if not isrelevant(m):
1753 # marker can be irrelevant when we're iterating over a set
1753 # marker can be irrelevant when we're iterating over a set
1754 # of markers (markerstoiter) which is bigger than the set
1754 # of markers (markerstoiter) which is bigger than the set
1755 # of markers we want to display (markers)
1755 # of markers we want to display (markers)
1756 # this can happen if both --index and --rev options are
1756 # this can happen if both --index and --rev options are
1757 # provided and thus we need to iterate over all of the markers
1757 # provided and thus we need to iterate over all of the markers
1758 # to get the correct indices, but only display the ones that
1758 # to get the correct indices, but only display the ones that
1759 # are relevant to --rev value
1759 # are relevant to --rev value
1760 continue
1760 continue
1761 fm.startitem()
1761 fm.startitem()
1762 ind = i if opts.get('index') else None
1762 ind = i if opts.get('index') else None
1763 cmdutil.showmarker(fm, m, index=ind)
1763 cmdutil.showmarker(fm, m, index=ind)
1764 fm.end()
1764 fm.end()
1765
1765
1766 @command('debugpathcomplete',
1766 @command('debugpathcomplete',
1767 [('f', 'full', None, _('complete an entire path')),
1767 [('f', 'full', None, _('complete an entire path')),
1768 ('n', 'normal', None, _('show only normal files')),
1768 ('n', 'normal', None, _('show only normal files')),
1769 ('a', 'added', None, _('show only added files')),
1769 ('a', 'added', None, _('show only added files')),
1770 ('r', 'removed', None, _('show only removed files'))],
1770 ('r', 'removed', None, _('show only removed files'))],
1771 _('FILESPEC...'))
1771 _('FILESPEC...'))
1772 def debugpathcomplete(ui, repo, *specs, **opts):
1772 def debugpathcomplete(ui, repo, *specs, **opts):
1773 '''complete part or all of a tracked path
1773 '''complete part or all of a tracked path
1774
1774
1775 This command supports shells that offer path name completion. It
1775 This command supports shells that offer path name completion. It
1776 currently completes only files already known to the dirstate.
1776 currently completes only files already known to the dirstate.
1777
1777
1778 Completion extends only to the next path segment unless
1778 Completion extends only to the next path segment unless
1779 --full is specified, in which case entire paths are used.'''
1779 --full is specified, in which case entire paths are used.'''
1780
1780
1781 def complete(path, acceptable):
1781 def complete(path, acceptable):
1782 dirstate = repo.dirstate
1782 dirstate = repo.dirstate
1783 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1783 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1784 rootdir = repo.root + pycompat.ossep
1784 rootdir = repo.root + pycompat.ossep
1785 if spec != repo.root and not spec.startswith(rootdir):
1785 if spec != repo.root and not spec.startswith(rootdir):
1786 return [], []
1786 return [], []
1787 if os.path.isdir(spec):
1787 if os.path.isdir(spec):
1788 spec += '/'
1788 spec += '/'
1789 spec = spec[len(rootdir):]
1789 spec = spec[len(rootdir):]
1790 fixpaths = pycompat.ossep != '/'
1790 fixpaths = pycompat.ossep != '/'
1791 if fixpaths:
1791 if fixpaths:
1792 spec = spec.replace(pycompat.ossep, '/')
1792 spec = spec.replace(pycompat.ossep, '/')
1793 speclen = len(spec)
1793 speclen = len(spec)
1794 fullpaths = opts[r'full']
1794 fullpaths = opts[r'full']
1795 files, dirs = set(), set()
1795 files, dirs = set(), set()
1796 adddir, addfile = dirs.add, files.add
1796 adddir, addfile = dirs.add, files.add
1797 for f, st in dirstate.iteritems():
1797 for f, st in dirstate.iteritems():
1798 if f.startswith(spec) and st[0] in acceptable:
1798 if f.startswith(spec) and st[0] in acceptable:
1799 if fixpaths:
1799 if fixpaths:
1800 f = f.replace('/', pycompat.ossep)
1800 f = f.replace('/', pycompat.ossep)
1801 if fullpaths:
1801 if fullpaths:
1802 addfile(f)
1802 addfile(f)
1803 continue
1803 continue
1804 s = f.find(pycompat.ossep, speclen)
1804 s = f.find(pycompat.ossep, speclen)
1805 if s >= 0:
1805 if s >= 0:
1806 adddir(f[:s])
1806 adddir(f[:s])
1807 else:
1807 else:
1808 addfile(f)
1808 addfile(f)
1809 return files, dirs
1809 return files, dirs
1810
1810
1811 acceptable = ''
1811 acceptable = ''
1812 if opts[r'normal']:
1812 if opts[r'normal']:
1813 acceptable += 'nm'
1813 acceptable += 'nm'
1814 if opts[r'added']:
1814 if opts[r'added']:
1815 acceptable += 'a'
1815 acceptable += 'a'
1816 if opts[r'removed']:
1816 if opts[r'removed']:
1817 acceptable += 'r'
1817 acceptable += 'r'
1818 cwd = repo.getcwd()
1818 cwd = repo.getcwd()
1819 if not specs:
1819 if not specs:
1820 specs = ['.']
1820 specs = ['.']
1821
1821
1822 files, dirs = set(), set()
1822 files, dirs = set(), set()
1823 for spec in specs:
1823 for spec in specs:
1824 f, d = complete(spec, acceptable or 'nmar')
1824 f, d = complete(spec, acceptable or 'nmar')
1825 files.update(f)
1825 files.update(f)
1826 dirs.update(d)
1826 dirs.update(d)
1827 files.update(dirs)
1827 files.update(dirs)
1828 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1828 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1829 ui.write('\n')
1829 ui.write('\n')
1830
1830
1831 @command('debugpeer', [], _('PATH'), norepo=True)
1831 @command('debugpeer', [], _('PATH'), norepo=True)
1832 def debugpeer(ui, path):
1832 def debugpeer(ui, path):
1833 """establish a connection to a peer repository"""
1833 """establish a connection to a peer repository"""
1834 # Always enable peer request logging. Requires --debug to display
1834 # Always enable peer request logging. Requires --debug to display
1835 # though.
1835 # though.
1836 overrides = {
1836 overrides = {
1837 ('devel', 'debug.peer-request'): True,
1837 ('devel', 'debug.peer-request'): True,
1838 }
1838 }
1839
1839
1840 with ui.configoverride(overrides):
1840 with ui.configoverride(overrides):
1841 peer = hg.peer(ui, {}, path)
1841 peer = hg.peer(ui, {}, path)
1842
1842
1843 local = peer.local() is not None
1843 local = peer.local() is not None
1844 canpush = peer.canpush()
1844 canpush = peer.canpush()
1845
1845
1846 ui.write(_('url: %s\n') % peer.url())
1846 ui.write(_('url: %s\n') % peer.url())
1847 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1847 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1848 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1848 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1849
1849
1850 @command('debugpickmergetool',
1850 @command('debugpickmergetool',
1851 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1851 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1852 ('', 'changedelete', None, _('emulate merging change and delete')),
1852 ('', 'changedelete', None, _('emulate merging change and delete')),
1853 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1853 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1854 _('[PATTERN]...'),
1854 _('[PATTERN]...'),
1855 inferrepo=True)
1855 inferrepo=True)
1856 def debugpickmergetool(ui, repo, *pats, **opts):
1856 def debugpickmergetool(ui, repo, *pats, **opts):
1857 """examine which merge tool is chosen for specified file
1857 """examine which merge tool is chosen for specified file
1858
1858
1859 As described in :hg:`help merge-tools`, Mercurial examines
1859 As described in :hg:`help merge-tools`, Mercurial examines
1860 configurations below in this order to decide which merge tool is
1860 configurations below in this order to decide which merge tool is
1861 chosen for specified file.
1861 chosen for specified file.
1862
1862
1863 1. ``--tool`` option
1863 1. ``--tool`` option
1864 2. ``HGMERGE`` environment variable
1864 2. ``HGMERGE`` environment variable
1865 3. configurations in ``merge-patterns`` section
1865 3. configurations in ``merge-patterns`` section
1866 4. configuration of ``ui.merge``
1866 4. configuration of ``ui.merge``
1867 5. configurations in ``merge-tools`` section
1867 5. configurations in ``merge-tools`` section
1868 6. ``hgmerge`` tool (for historical reason only)
1868 6. ``hgmerge`` tool (for historical reason only)
1869 7. default tool for fallback (``:merge`` or ``:prompt``)
1869 7. default tool for fallback (``:merge`` or ``:prompt``)
1870
1870
1871 This command writes out examination result in the style below::
1871 This command writes out examination result in the style below::
1872
1872
1873 FILE = MERGETOOL
1873 FILE = MERGETOOL
1874
1874
1875 By default, all files known in the first parent context of the
1875 By default, all files known in the first parent context of the
1876 working directory are examined. Use file patterns and/or -I/-X
1876 working directory are examined. Use file patterns and/or -I/-X
1877 options to limit target files. -r/--rev is also useful to examine
1877 options to limit target files. -r/--rev is also useful to examine
1878 files in another context without actual updating to it.
1878 files in another context without actual updating to it.
1879
1879
1880 With --debug, this command shows warning messages while matching
1880 With --debug, this command shows warning messages while matching
1881 against ``merge-patterns`` and so on, too. It is recommended to
1881 against ``merge-patterns`` and so on, too. It is recommended to
1882 use this option with explicit file patterns and/or -I/-X options,
1882 use this option with explicit file patterns and/or -I/-X options,
1883 because this option increases amount of output per file according
1883 because this option increases amount of output per file according
1884 to configurations in hgrc.
1884 to configurations in hgrc.
1885
1885
1886 With -v/--verbose, this command shows configurations below at
1886 With -v/--verbose, this command shows configurations below at
1887 first (only if specified).
1887 first (only if specified).
1888
1888
1889 - ``--tool`` option
1889 - ``--tool`` option
1890 - ``HGMERGE`` environment variable
1890 - ``HGMERGE`` environment variable
1891 - configuration of ``ui.merge``
1891 - configuration of ``ui.merge``
1892
1892
1893 If merge tool is chosen before matching against
1893 If merge tool is chosen before matching against
1894 ``merge-patterns``, this command can't show any helpful
1894 ``merge-patterns``, this command can't show any helpful
1895 information, even with --debug. In such case, information above is
1895 information, even with --debug. In such case, information above is
1896 useful to know why a merge tool is chosen.
1896 useful to know why a merge tool is chosen.
1897 """
1897 """
1898 opts = pycompat.byteskwargs(opts)
1898 opts = pycompat.byteskwargs(opts)
1899 overrides = {}
1899 overrides = {}
1900 if opts['tool']:
1900 if opts['tool']:
1901 overrides[('ui', 'forcemerge')] = opts['tool']
1901 overrides[('ui', 'forcemerge')] = opts['tool']
1902 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1902 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1903
1903
1904 with ui.configoverride(overrides, 'debugmergepatterns'):
1904 with ui.configoverride(overrides, 'debugmergepatterns'):
1905 hgmerge = encoding.environ.get("HGMERGE")
1905 hgmerge = encoding.environ.get("HGMERGE")
1906 if hgmerge is not None:
1906 if hgmerge is not None:
1907 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1907 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1908 uimerge = ui.config("ui", "merge")
1908 uimerge = ui.config("ui", "merge")
1909 if uimerge:
1909 if uimerge:
1910 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1910 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1911
1911
1912 ctx = scmutil.revsingle(repo, opts.get('rev'))
1912 ctx = scmutil.revsingle(repo, opts.get('rev'))
1913 m = scmutil.match(ctx, pats, opts)
1913 m = scmutil.match(ctx, pats, opts)
1914 changedelete = opts['changedelete']
1914 changedelete = opts['changedelete']
1915 for path in ctx.walk(m):
1915 for path in ctx.walk(m):
1916 fctx = ctx[path]
1916 fctx = ctx[path]
1917 try:
1917 try:
1918 if not ui.debugflag:
1918 if not ui.debugflag:
1919 ui.pushbuffer(error=True)
1919 ui.pushbuffer(error=True)
1920 tool, toolpath = filemerge._picktool(repo, ui, path,
1920 tool, toolpath = filemerge._picktool(repo, ui, path,
1921 fctx.isbinary(),
1921 fctx.isbinary(),
1922 'l' in fctx.flags(),
1922 'l' in fctx.flags(),
1923 changedelete)
1923 changedelete)
1924 finally:
1924 finally:
1925 if not ui.debugflag:
1925 if not ui.debugflag:
1926 ui.popbuffer()
1926 ui.popbuffer()
1927 ui.write(('%s = %s\n') % (path, tool))
1927 ui.write(('%s = %s\n') % (path, tool))
1928
1928
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace,
        # escaping both sides so binary values stay printable.
        for name, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(name),
                                   stringutil.escapestr(value)))
        return

    # Update mode: issue a compare-and-set style 'pushkey' command over
    # the peer's command executor and report the remote's answer.
    key, old, new = keyinfo
    with peer.commandexecutor() as executor:
        outcome = executor.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(outcome) + '\n')
    # Shell convention: 0 on success, so invert the boolean result.
    return not outcome
1956
1956
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec comparison data for two revisions

    Resolves A and B to single revisions, builds a pvec for each, and
    prints both vectors, their depths, and delta/hamming/distance
    metrics together with the relation between them.
    """
    # Resolve the two user-supplied revision identifiers to contexts.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify the relation via pvec's overloaded comparison operators.
    # NOTE(review): if none of the four branches matches, 'rel' is left
    # unbound and the final write below raises NameError -- presumably
    # the four cases are exhaustive for pvecs; confirm against the pvec
    # module before relying on this.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1977
1977
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        ds = repo.dirstate
        # None asks rebuild() to reset everything; --minimal narrows the
        # set to files whose tracked state disagrees between dirstate and
        # manifest (see the command docstring).
        changedfiles = None
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(ds)
            # Tracked in the manifest but unknown to the dirstate...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only entries that are not pending adds.
            dsnotadded = {f for f in indirstate - inmanifest
                          if ds[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        ds.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2015
2015
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # All of the actual work lives in the repair module.
    repair.rebuildfncache(ui, repo)
2020
2020
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    # 'abspath' rather than 'abs' so the local does not shadow the
    # builtin abs() (the only code change in this block).
    for abspath in ctx.walk(m):
        fctx = ctx[abspath]
        # Copy/rename metadata from the filelog entry; falsy when the
        # file was not renamed at this revision.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abspath)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
2038
2038
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog

    With -d/--dump, print one raw index row per revision and return.
    Otherwise aggregate per-revision data into statistics about revision
    kinds (merges, empties, snapshots, deltas), delta-parent choices,
    chain lengths and spans, storage chunk types, and the overall
    compression ratio, then print a summary report.
    """
    opts = pycompat.byteskwargs(opts)
    # Resolves -c/-m/FILE to the changelog, manifest or a filelog revlog.
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0          # running total of raw (uncompressed) sizes
        heads = set()   # current DAG heads among revisions seen so far

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the rev as its own base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Parents of this rev stop being heads; this rev becomes one.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw bytes seen so far to stored bytes so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF     # low 16 bits hold the revlog format version
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are build
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each accumulator is [min, max, total]
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold `size` into the [min, max, total] accumulator `l` in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Stored as full text (or empty), not as a delta: starts a
            # fresh chain of length 0.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Stored as a delta: extend the delta parent's chain.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            # Span: file distance from chain base to the end of this rev.
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Regular delta; classify its base (prev, p1, p2, other).
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies its encoding.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # From here on, each accumulator's total slot becomes an average.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if 0 < numsemi:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format templates; the field width is derived from the largest value
    # so columns line up.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format sized to fit `max`.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format sized to fit `max`.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage of total) for the formats above.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Render a readable label for a chunk's one-byte encoding marker.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                # Depth 0 is the full-revision case, reported above.
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            # p1/p2/other delta bases only occur with generaldelta.
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2338
2338
2339 @command('debugrevspec',
2339 @command('debugrevspec',
2340 [('', 'optimize', None,
2340 [('', 'optimize', None,
2341 _('print parsed tree after optimizing (DEPRECATED)')),
2341 _('print parsed tree after optimizing (DEPRECATED)')),
2342 ('', 'show-revs', True, _('print list of result revisions (default)')),
2342 ('', 'show-revs', True, _('print list of result revisions (default)')),
2343 ('s', 'show-set', None, _('print internal representation of result set')),
2343 ('s', 'show-set', None, _('print internal representation of result set')),
2344 ('p', 'show-stage', [],
2344 ('p', 'show-stage', [],
2345 _('print parsed tree at the given stage'), _('NAME')),
2345 _('print parsed tree at the given stage'), _('NAME')),
2346 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2346 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2347 ('', 'verify-optimized', False, _('verify optimized result')),
2347 ('', 'verify-optimized', False, _('verify optimized result')),
2348 ],
2348 ],
2349 ('REVSPEC'))
2349 ('REVSPEC'))
2350 def debugrevspec(ui, repo, expr, **opts):
2350 def debugrevspec(ui, repo, expr, **opts):
2351 """parse and apply a revision specification
2351 """parse and apply a revision specification
2352
2352
2353 Use -p/--show-stage option to print the parsed tree at the given stages.
2353 Use -p/--show-stage option to print the parsed tree at the given stages.
2354 Use -p all to print tree at every stage.
2354 Use -p all to print tree at every stage.
2355
2355
2356 Use --no-show-revs option with -s or -p to print only the set
2356 Use --no-show-revs option with -s or -p to print only the set
2357 representation or the parsed tree respectively.
2357 representation or the parsed tree respectively.
2358
2358
2359 Use --verify-optimized to compare the optimized result with the unoptimized
2359 Use --verify-optimized to compare the optimized result with the unoptimized
2360 one. Returns 1 if the optimized result differs.
2360 one. Returns 1 if the optimized result differs.
2361 """
2361 """
2362 opts = pycompat.byteskwargs(opts)
2362 opts = pycompat.byteskwargs(opts)
2363 aliases = ui.configitems('revsetalias')
2363 aliases = ui.configitems('revsetalias')
2364 stages = [
2364 stages = [
2365 ('parsed', lambda tree: tree),
2365 ('parsed', lambda tree: tree),
2366 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2366 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2367 ui.warn)),
2367 ui.warn)),
2368 ('concatenated', revsetlang.foldconcat),
2368 ('concatenated', revsetlang.foldconcat),
2369 ('analyzed', revsetlang.analyze),
2369 ('analyzed', revsetlang.analyze),
2370 ('optimized', revsetlang.optimize),
2370 ('optimized', revsetlang.optimize),
2371 ]
2371 ]
2372 if opts['no_optimized']:
2372 if opts['no_optimized']:
2373 stages = stages[:-1]
2373 stages = stages[:-1]
2374 if opts['verify_optimized'] and opts['no_optimized']:
2374 if opts['verify_optimized'] and opts['no_optimized']:
2375 raise error.Abort(_('cannot use --verify-optimized with '
2375 raise error.Abort(_('cannot use --verify-optimized with '
2376 '--no-optimized'))
2376 '--no-optimized'))
2377 stagenames = set(n for n, f in stages)
2377 stagenames = set(n for n, f in stages)
2378
2378
2379 showalways = set()
2379 showalways = set()
2380 showchanged = set()
2380 showchanged = set()
2381 if ui.verbose and not opts['show_stage']:
2381 if ui.verbose and not opts['show_stage']:
2382 # show parsed tree by --verbose (deprecated)
2382 # show parsed tree by --verbose (deprecated)
2383 showalways.add('parsed')
2383 showalways.add('parsed')
2384 showchanged.update(['expanded', 'concatenated'])
2384 showchanged.update(['expanded', 'concatenated'])
2385 if opts['optimize']:
2385 if opts['optimize']:
2386 showalways.add('optimized')
2386 showalways.add('optimized')
2387 if opts['show_stage'] and opts['optimize']:
2387 if opts['show_stage'] and opts['optimize']:
2388 raise error.Abort(_('cannot use --optimize with --show-stage'))
2388 raise error.Abort(_('cannot use --optimize with --show-stage'))
2389 if opts['show_stage'] == ['all']:
2389 if opts['show_stage'] == ['all']:
2390 showalways.update(stagenames)
2390 showalways.update(stagenames)
2391 else:
2391 else:
2392 for n in opts['show_stage']:
2392 for n in opts['show_stage']:
2393 if n not in stagenames:
2393 if n not in stagenames:
2394 raise error.Abort(_('invalid stage name: %s') % n)
2394 raise error.Abort(_('invalid stage name: %s') % n)
2395 showalways.update(opts['show_stage'])
2395 showalways.update(opts['show_stage'])
2396
2396
2397 treebystage = {}
2397 treebystage = {}
2398 printedtree = None
2398 printedtree = None
2399 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2399 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2400 for n, f in stages:
2400 for n, f in stages:
2401 treebystage[n] = tree = f(tree)
2401 treebystage[n] = tree = f(tree)
2402 if n in showalways or (n in showchanged and tree != printedtree):
2402 if n in showalways or (n in showchanged and tree != printedtree):
2403 if opts['show_stage'] or n != 'parsed':
2403 if opts['show_stage'] or n != 'parsed':
2404 ui.write(("* %s:\n") % n)
2404 ui.write(("* %s:\n") % n)
2405 ui.write(revsetlang.prettyformat(tree), "\n")
2405 ui.write(revsetlang.prettyformat(tree), "\n")
2406 printedtree = tree
2406 printedtree = tree
2407
2407
2408 if opts['verify_optimized']:
2408 if opts['verify_optimized']:
2409 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2409 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2410 brevs = revset.makematcher(treebystage['optimized'])(repo)
2410 brevs = revset.makematcher(treebystage['optimized'])(repo)
2411 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2411 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2412 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2412 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2413 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2413 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2414 arevs = list(arevs)
2414 arevs = list(arevs)
2415 brevs = list(brevs)
2415 brevs = list(brevs)
2416 if arevs == brevs:
2416 if arevs == brevs:
2417 return 0
2417 return 0
2418 ui.write(('--- analyzed\n'), label='diff.file_a')
2418 ui.write(('--- analyzed\n'), label='diff.file_a')
2419 ui.write(('+++ optimized\n'), label='diff.file_b')
2419 ui.write(('+++ optimized\n'), label='diff.file_b')
2420 sm = difflib.SequenceMatcher(None, arevs, brevs)
2420 sm = difflib.SequenceMatcher(None, arevs, brevs)
2421 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2421 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2422 if tag in ('delete', 'replace'):
2422 if tag in ('delete', 'replace'):
2423 for c in arevs[alo:ahi]:
2423 for c in arevs[alo:ahi]:
2424 ui.write('-%s\n' % c, label='diff.deleted')
2424 ui.write('-%s\n' % c, label='diff.deleted')
2425 if tag in ('insert', 'replace'):
2425 if tag in ('insert', 'replace'):
2426 for c in brevs[blo:bhi]:
2426 for c in brevs[blo:bhi]:
2427 ui.write('+%s\n' % c, label='diff.inserted')
2427 ui.write('+%s\n' % c, label='diff.inserted')
2428 if tag == 'equal':
2428 if tag == 'equal':
2429 for c in arevs[alo:ahi]:
2429 for c in arevs[alo:ahi]:
2430 ui.write(' %s\n' % c)
2430 ui.write(' %s\n' % c)
2431 return 1
2431 return 1
2432
2432
2433 func = revset.makematcher(tree)
2433 func = revset.makematcher(tree)
2434 revs = func(repo)
2434 revs = func(repo)
2435 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2435 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2436 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2436 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2437 if not opts['show_revs']:
2437 if not opts['show_revs']:
2438 return
2438 return
2439 for c in revs:
2439 for c in revs:
2440 ui.write("%d\n" % c)
2440 ui.write("%d\n" % c)
2441
2441
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH (stdio) transport is wired up here.
    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # The two logging destinations are mutually exclusive.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3; fall back to `wb`
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2478
2478
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # A missing second revision means the null revision (single parent).
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(node1, node2)
2496
2496
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Imported lazily: the win32 module only exists/works on Windows.
    from . import win32

    # CERT_NONE: we only need the raw peer certificate blob to hand to the
    # Windows chain-building API; we are not validating the connection here.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second call with build enabled (the default) asks Windows
            # Update for the missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2558
2558
@command('debugsub',
    [('r', 'rev', '',
     _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    """show the subrepository state recorded at a revision"""
    ctx = scmutil.revsingle(repo, rev, None)
    # substate maps subrepo path -> (source, revision, kind); print the
    # first two fields for each entry, sorted by path for stable output.
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source   %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])
2569
2569
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                # Additional successors in the same set go on the same line.
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2622
2622
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect user-supplied KEY=VALUE keyword definitions.
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with the defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2679
2679
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix typo ('respose') and match sibling debuguiprompt's output format.
    ui.write(('response: %s\n') % r)
2687
2687
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    r = ui.prompt(prompt)
    ui.write(('response: %s\n') % r)
2695
2695
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both locks so cache files can be rewritten consistently.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
2701
2701
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2726
2726
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    f = lambda fn: fn
    # Honor ui.slash: normalize path separators for display on Windows.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
    # Column widths are sized to the longest absolute and relative paths.
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())
2747
2747
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
        dnodes = ''
        if entry.get('divergentnodes'):
            # Render each divergent changeset as "<hex> (<phase>)".
            dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
                              for ctx in entry['divergentnodes']) + ' '
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2758
2758
@command('debugwireargs',
    [('', 'three', '', 'three'),
    ('', 'four', '', 'four'),
    ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    """echo arguments through the wire protocol of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Strip connection-related options; only the test args are forwarded.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2782
2782
2783 def _parsewirelangblocks(fh):
2783 def _parsewirelangblocks(fh):
2784 activeaction = None
2784 activeaction = None
2785 blocklines = []
2785 blocklines = []
2786
2786
2787 for line in fh:
2787 for line in fh:
2788 line = line.rstrip()
2788 line = line.rstrip()
2789 if not line:
2789 if not line:
2790 continue
2790 continue
2791
2791
2792 if line.startswith(b'#'):
2792 if line.startswith(b'#'):
2793 continue
2793 continue
2794
2794
2795 if not line.startswith(b' '):
2795 if not line.startswith(b' '):
2796 # New block. Flush previous one.
2796 # New block. Flush previous one.
2797 if activeaction:
2797 if activeaction:
2798 yield activeaction, blocklines
2798 yield activeaction, blocklines
2799
2799
2800 activeaction = line
2800 activeaction = line
2801 blocklines = []
2801 blocklines = []
2802 continue
2802 continue
2803
2803
2804 # Else we start with an indent.
2804 # Else we start with an indent.
2805
2805
2806 if not activeaction:
2806 if not activeaction:
2807 raise error.Abort(_('indented line outside of block'))
2807 raise error.Abort(_('indented line outside of block'))
2808
2808
2809 blocklines.append(line)
2809 blocklines.append(line)
2810
2810
2811 # Flush last block.
2811 # Flush last block.
2812 if activeaction:
2812 if activeaction:
2813 yield activeaction, blocklines
2813 yield activeaction, blocklines
2814
2814
2815 @command('debugwireproto',
2815 @command('debugwireproto',
2816 [
2816 [
2817 ('', 'localssh', False, _('start an SSH server for this repo')),
2817 ('', 'localssh', False, _('start an SSH server for this repo')),
2818 ('', 'peer', '', _('construct a specific version of the peer')),
2818 ('', 'peer', '', _('construct a specific version of the peer')),
2819 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2819 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2820 ('', 'nologhandshake', False,
2820 ('', 'nologhandshake', False,
2821 _('do not log I/O related to the peer handshake')),
2821 _('do not log I/O related to the peer handshake')),
2822 ] + cmdutil.remoteopts,
2822 ] + cmdutil.remoteopts,
2823 _('[PATH]'),
2823 _('[PATH]'),
2824 optionalrepo=True)
2824 optionalrepo=True)
2825 def debugwireproto(ui, repo, path=None, **opts):
2825 def debugwireproto(ui, repo, path=None, **opts):
2826 """send wire protocol commands to a server
2826 """send wire protocol commands to a server
2827
2827
2828 This command can be used to issue wire protocol commands to remote
2828 This command can be used to issue wire protocol commands to remote
2829 peers and to debug the raw data being exchanged.
2829 peers and to debug the raw data being exchanged.
2830
2830
2831 ``--localssh`` will start an SSH server against the current repository
2831 ``--localssh`` will start an SSH server against the current repository
2832 and connect to that. By default, the connection will perform a handshake
2832 and connect to that. By default, the connection will perform a handshake
2833 and establish an appropriate peer instance.
2833 and establish an appropriate peer instance.
2834
2834
2835 ``--peer`` can be used to bypass the handshake protocol and construct a
2835 ``--peer`` can be used to bypass the handshake protocol and construct a
2836 peer instance using the specified class type. Valid values are ``raw``,
2836 peer instance using the specified class type. Valid values are ``raw``,
2837 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2837 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2838 raw data payloads and don't support higher-level command actions.
2838 raw data payloads and don't support higher-level command actions.
2839
2839
2840 ``--noreadstderr`` can be used to disable automatic reading from stderr
2840 ``--noreadstderr`` can be used to disable automatic reading from stderr
2841 of the peer (for SSH connections only). Disabling automatic reading of
2841 of the peer (for SSH connections only). Disabling automatic reading of
2842 stderr is useful for making output more deterministic.
2842 stderr is useful for making output more deterministic.
2843
2843
2844 Commands are issued via a mini language which is specified via stdin.
2844 Commands are issued via a mini language which is specified via stdin.
2845 The language consists of individual actions to perform. An action is
2845 The language consists of individual actions to perform. An action is
2846 defined by a block. A block is defined as a line with no leading
2846 defined by a block. A block is defined as a line with no leading
2847 space followed by 0 or more lines with leading space. Blocks are
2847 space followed by 0 or more lines with leading space. Blocks are
2848 effectively a high-level command with additional metadata.
2848 effectively a high-level command with additional metadata.
2849
2849
2850 Lines beginning with ``#`` are ignored.
2850 Lines beginning with ``#`` are ignored.
2851
2851
2852 The following sections denote available actions.
2852 The following sections denote available actions.
2853
2853
2854 raw
2854 raw
2855 ---
2855 ---
2856
2856
2857 Send raw data to the server.
2857 Send raw data to the server.
2858
2858
2859 The block payload contains the raw data to send as one atomic send
2859 The block payload contains the raw data to send as one atomic send
2860 operation. The data may not actually be delivered in a single system
2860 operation. The data may not actually be delivered in a single system
2861 call: it depends on the abilities of the transport being used.
2861 call: it depends on the abilities of the transport being used.
2862
2862
2863 Each line in the block is de-indented and concatenated. Then, that
2863 Each line in the block is de-indented and concatenated. Then, that
2864 value is evaluated as a Python b'' literal. This allows the use of
2864 value is evaluated as a Python b'' literal. This allows the use of
2865 backslash escaping, etc.
2865 backslash escaping, etc.
2866
2866
2867 raw+
2867 raw+
2868 ----
2868 ----
2869
2869
2870 Behaves like ``raw`` except flushes output afterwards.
2870 Behaves like ``raw`` except flushes output afterwards.
2871
2871
2872 command <X>
2872 command <X>
2873 -----------
2873 -----------
2874
2874
2875 Send a request to run a named command, whose name follows the ``command``
2875 Send a request to run a named command, whose name follows the ``command``
2876 string.
2876 string.
2877
2877
2878 Arguments to the command are defined as lines in this block. The format of
2878 Arguments to the command are defined as lines in this block. The format of
2879 each line is ``<key> <value>``. e.g.::
2879 each line is ``<key> <value>``. e.g.::
2880
2880
2881 command listkeys
2881 command listkeys
2882 namespace bookmarks
2882 namespace bookmarks
2883
2883
2884 If the value begins with ``eval:``, it will be interpreted as a Python
2884 If the value begins with ``eval:``, it will be interpreted as a Python
2885 literal expression. Otherwise values are interpreted as Python b'' literals.
2885 literal expression. Otherwise values are interpreted as Python b'' literals.
2886 This allows sending complex types and encoding special byte sequences via
2886 This allows sending complex types and encoding special byte sequences via
2887 backslash escaping.
2887 backslash escaping.
2888
2888
2889 The following arguments have special meaning:
2889 The following arguments have special meaning:
2890
2890
2891 ``PUSHFILE``
2891 ``PUSHFILE``
2892 When defined, the *push* mechanism of the peer will be used instead
2892 When defined, the *push* mechanism of the peer will be used instead
2893 of the static request-response mechanism and the content of the
2893 of the static request-response mechanism and the content of the
2894 file specified in the value of this argument will be sent as the
2894 file specified in the value of this argument will be sent as the
2895 command payload.
2895 command payload.
2896
2896
2897 This can be used to submit a local bundle file to the remote.
2897 This can be used to submit a local bundle file to the remote.
2898
2898
2899 batchbegin
2899 batchbegin
2900 ----------
2900 ----------
2901
2901
2902 Instruct the peer to begin a batched send.
2902 Instruct the peer to begin a batched send.
2903
2903
2904 All ``command`` blocks are queued for execution until the next
2904 All ``command`` blocks are queued for execution until the next
2905 ``batchsubmit`` block.
2905 ``batchsubmit`` block.
2906
2906
2907 batchsubmit
2907 batchsubmit
2908 -----------
2908 -----------
2909
2909
2910 Submit previously queued ``command`` blocks as a batch request.
2910 Submit previously queued ``command`` blocks as a batch request.
2911
2911
2912 This action MUST be paired with a ``batchbegin`` action.
2912 This action MUST be paired with a ``batchbegin`` action.
2913
2913
2914 httprequest <method> <path>
2914 httprequest <method> <path>
2915 ---------------------------
2915 ---------------------------
2916
2916
2917 (HTTP peer only)
2917 (HTTP peer only)
2918
2918
2919 Send an HTTP request to the peer.
2919 Send an HTTP request to the peer.
2920
2920
2921 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2921 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2922
2922
2923 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2923 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2924 headers to add to the request. e.g. ``Accept: foo``.
2924 headers to add to the request. e.g. ``Accept: foo``.
2925
2925
2926 The following arguments are special:
2926 The following arguments are special:
2927
2927
2928 ``BODYFILE``
2928 ``BODYFILE``
2929 The content of the file defined as the value to this argument will be
2929 The content of the file defined as the value to this argument will be
2930 transferred verbatim as the HTTP request body.
2930 transferred verbatim as the HTTP request body.
2931
2931
2932 ``frame <type> <flags> <payload>``
2932 ``frame <type> <flags> <payload>``
2933 Send a unified protocol frame as part of the request body.
2933 Send a unified protocol frame as part of the request body.
2934
2934
2935 All frames will be collected and sent as the body to the HTTP
2935 All frames will be collected and sent as the body to the HTTP
2936 request.
2936 request.
2937
2937
2938 close
2938 close
2939 -----
2939 -----
2940
2940
2941 Close the connection to the server.
2941 Close the connection to the server.
2942
2942
2943 flush
2943 flush
2944 -----
2944 -----
2945
2945
2946 Flush data written to the server.
2946 Flush data written to the server.
2947
2947
2948 readavailable
2948 readavailable
2949 -------------
2949 -------------
2950
2950
2951 Close the write end of the connection and read all available data from
2951 Close the write end of the connection and read all available data from
2952 the server.
2952 the server.
2953
2953
2954 If the connection to the server encompasses multiple pipes, we poll both
2954 If the connection to the server encompasses multiple pipes, we poll both
2955 pipes and read available data.
2955 pipes and read available data.
2956
2956
2957 readline
2957 readline
2958 --------
2958 --------
2959
2959
2960 Read a line of output from the server. If there are multiple output
2960 Read a line of output from the server. If there are multiple output
2961 pipes, reads only the main pipe.
2961 pipes, reads only the main pipe.
2962
2962
2963 ereadline
2963 ereadline
2964 ---------
2964 ---------
2965
2965
2966 Like ``readline``, but read from the stderr pipe, if available.
2966 Like ``readline``, but read from the stderr pipe, if available.
2967
2967
2968 read <X>
2968 read <X>
2969 --------
2969 --------
2970
2970
2971 ``read()`` N bytes from the server's main output pipe.
2971 ``read()`` N bytes from the server's main output pipe.
2972
2972
2973 eread <X>
2973 eread <X>
2974 ---------
2974 ---------
2975
2975
2976 ``read()`` N bytes from the server's stderr pipe, if available.
2976 ``read()`` N bytes from the server's stderr pipe, if available.
2977
2977
2978 Specifying Unified Frame-Based Protocol Frames
2978 Specifying Unified Frame-Based Protocol Frames
2979 ----------------------------------------------
2979 ----------------------------------------------
2980
2980
2981 It is possible to emit a *Unified Frame-Based Protocol* by using special
2981 It is possible to emit a *Unified Frame-Based Protocol* by using special
2982 syntax.
2982 syntax.
2983
2983
2984 A frame is composed as a type, flags, and payload. These can be parsed
2984 A frame is composed as a type, flags, and payload. These can be parsed
2985 from a string of the form:
2985 from a string of the form:
2986
2986
2987 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2987 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2988
2988
2989 ``request-id`` and ``stream-id`` are integers defining the request and
2989 ``request-id`` and ``stream-id`` are integers defining the request and
2990 stream identifiers.
2990 stream identifiers.
2991
2991
2992 ``type`` can be an integer value for the frame type or the string name
2992 ``type`` can be an integer value for the frame type or the string name
2993 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2993 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2994 ``command-name``.
2994 ``command-name``.
2995
2995
2996 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2996 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2997 components. Each component (and there can be just one) can be an integer
2997 components. Each component (and there can be just one) can be an integer
2998 or a flag name for stream flags or frame flags, respectively. Values are
2998 or a flag name for stream flags or frame flags, respectively. Values are
2999 resolved to integers and then bitwise OR'd together.
2999 resolved to integers and then bitwise OR'd together.
3000
3000
3001 ``payload`` represents the raw frame payload. If it begins with
3001 ``payload`` represents the raw frame payload. If it begins with
3002 ``cbor:``, the following string is evaluated as Python code and the
3002 ``cbor:``, the following string is evaluated as Python code and the
3003 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3003 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3004 as a Python byte string literal.
3004 as a Python byte string literal.
3005 """
3005 """
3006 opts = pycompat.byteskwargs(opts)
3006 opts = pycompat.byteskwargs(opts)
3007
3007
3008 if opts['localssh'] and not repo:
3008 if opts['localssh'] and not repo:
3009 raise error.Abort(_('--localssh requires a repository'))
3009 raise error.Abort(_('--localssh requires a repository'))
3010
3010
3011 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3011 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3012 raise error.Abort(_('invalid value for --peer'),
3012 raise error.Abort(_('invalid value for --peer'),
3013 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3013 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3014
3014
3015 if path and opts['localssh']:
3015 if path and opts['localssh']:
3016 raise error.Abort(_('cannot specify --localssh with an explicit '
3016 raise error.Abort(_('cannot specify --localssh with an explicit '
3017 'path'))
3017 'path'))
3018
3018
3019 if ui.interactive():
3019 if ui.interactive():
3020 ui.write(_('(waiting for commands on stdin)\n'))
3020 ui.write(_('(waiting for commands on stdin)\n'))
3021
3021
3022 blocks = list(_parsewirelangblocks(ui.fin))
3022 blocks = list(_parsewirelangblocks(ui.fin))
3023
3023
3024 proc = None
3024 proc = None
3025 stdin = None
3025 stdin = None
3026 stdout = None
3026 stdout = None
3027 stderr = None
3027 stderr = None
3028 opener = None
3028 opener = None
3029
3029
3030 if opts['localssh']:
3030 if opts['localssh']:
3031 # We start the SSH server in its own process so there is process
3031 # We start the SSH server in its own process so there is process
3032 # separation. This prevents a whole class of potential bugs around
3032 # separation. This prevents a whole class of potential bugs around
3033 # shared state from interfering with server operation.
3033 # shared state from interfering with server operation.
3034 args = procutil.hgcmd() + [
3034 args = procutil.hgcmd() + [
3035 '-R', repo.root,
3035 '-R', repo.root,
3036 'debugserve', '--sshstdio',
3036 'debugserve', '--sshstdio',
3037 ]
3037 ]
3038 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3038 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
3039 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3039 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3040 bufsize=0)
3040 bufsize=0)
3041
3041
3042 stdin = proc.stdin
3042 stdin = proc.stdin
3043 stdout = proc.stdout
3043 stdout = proc.stdout
3044 stderr = proc.stderr
3044 stderr = proc.stderr
3045
3045
3046 # We turn the pipes into observers so we can log I/O.
3046 # We turn the pipes into observers so we can log I/O.
3047 if ui.verbose or opts['peer'] == 'raw':
3047 if ui.verbose or opts['peer'] == 'raw':
3048 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3048 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3049 logdata=True)
3049 logdata=True)
3050 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3050 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3051 logdata=True)
3051 logdata=True)
3052 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3052 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3053 logdata=True)
3053 logdata=True)
3054
3054
3055 # --localssh also implies the peer connection settings.
3055 # --localssh also implies the peer connection settings.
3056
3056
3057 url = 'ssh://localserver'
3057 url = 'ssh://localserver'
3058 autoreadstderr = not opts['noreadstderr']
3058 autoreadstderr = not opts['noreadstderr']
3059
3059
3060 if opts['peer'] == 'ssh1':
3060 if opts['peer'] == 'ssh1':
3061 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3061 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3062 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3062 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3063 None, autoreadstderr=autoreadstderr)
3063 None, autoreadstderr=autoreadstderr)
3064 elif opts['peer'] == 'ssh2':
3064 elif opts['peer'] == 'ssh2':
3065 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3065 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3066 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3066 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3067 None, autoreadstderr=autoreadstderr)
3067 None, autoreadstderr=autoreadstderr)
3068 elif opts['peer'] == 'raw':
3068 elif opts['peer'] == 'raw':
3069 ui.write(_('using raw connection to peer\n'))
3069 ui.write(_('using raw connection to peer\n'))
3070 peer = None
3070 peer = None
3071 else:
3071 else:
3072 ui.write(_('creating ssh peer from handshake results\n'))
3072 ui.write(_('creating ssh peer from handshake results\n'))
3073 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3073 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3074 autoreadstderr=autoreadstderr)
3074 autoreadstderr=autoreadstderr)
3075
3075
3076 elif path:
3076 elif path:
3077 # We bypass hg.peer() so we can proxy the sockets.
3077 # We bypass hg.peer() so we can proxy the sockets.
3078 # TODO consider not doing this because we skip
3078 # TODO consider not doing this because we skip
3079 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3079 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3080 u = util.url(path)
3080 u = util.url(path)
3081 if u.scheme != 'http':
3081 if u.scheme != 'http':
3082 raise error.Abort(_('only http:// paths are currently supported'))
3082 raise error.Abort(_('only http:// paths are currently supported'))
3083
3083
3084 url, authinfo = u.authinfo()
3084 url, authinfo = u.authinfo()
3085 openerargs = {
3085 openerargs = {
3086 r'useragent': b'Mercurial debugwireproto',
3086 r'useragent': b'Mercurial debugwireproto',
3087 }
3087 }
3088
3088
3089 # Turn pipes/sockets into observers so we can log I/O.
3089 # Turn pipes/sockets into observers so we can log I/O.
3090 if ui.verbose:
3090 if ui.verbose:
3091 openerargs.update({
3091 openerargs.update({
3092 r'loggingfh': ui,
3092 r'loggingfh': ui,
3093 r'loggingname': b's',
3093 r'loggingname': b's',
3094 r'loggingopts': {
3094 r'loggingopts': {
3095 r'logdata': True,
3095 r'logdata': True,
3096 r'logdataapis': False,
3096 r'logdataapis': False,
3097 },
3097 },
3098 })
3098 })
3099
3099
3100 if ui.debugflag:
3100 if ui.debugflag:
3101 openerargs[r'loggingopts'][r'logdataapis'] = True
3101 openerargs[r'loggingopts'][r'logdataapis'] = True
3102
3102
3103 # Don't send default headers when in raw mode. This allows us to
3103 # Don't send default headers when in raw mode. This allows us to
3104 # bypass most of the behavior of our URL handling code so we can
3104 # bypass most of the behavior of our URL handling code so we can
3105 # have near complete control over what's sent on the wire.
3105 # have near complete control over what's sent on the wire.
3106 if opts['peer'] == 'raw':
3106 if opts['peer'] == 'raw':
3107 openerargs[r'sendaccept'] = False
3107 openerargs[r'sendaccept'] = False
3108
3108
3109 opener = urlmod.opener(ui, authinfo, **openerargs)
3109 opener = urlmod.opener(ui, authinfo, **openerargs)
3110
3110
3111 if opts['peer'] == 'http2':
3111 if opts['peer'] == 'http2':
3112 ui.write(_('creating http peer for wire protocol version 2\n'))
3112 ui.write(_('creating http peer for wire protocol version 2\n'))
3113 # We go through makepeer() because we need an API descriptor for
3113 # We go through makepeer() because we need an API descriptor for
3114 # the peer instance to be useful.
3114 # the peer instance to be useful.
3115 with ui.configoverride({
3115 with ui.configoverride({
3116 ('experimental', 'httppeer.advertise-v2'): True}):
3116 ('experimental', 'httppeer.advertise-v2'): True}):
3117 if opts['nologhandshake']:
3117 if opts['nologhandshake']:
3118 ui.pushbuffer()
3118 ui.pushbuffer()
3119
3119
3120 peer = httppeer.makepeer(ui, path, opener=opener)
3120 peer = httppeer.makepeer(ui, path, opener=opener)
3121
3121
3122 if opts['nologhandshake']:
3122 if opts['nologhandshake']:
3123 ui.popbuffer()
3123 ui.popbuffer()
3124
3124
3125 if not isinstance(peer, httppeer.httpv2peer):
3125 if not isinstance(peer, httppeer.httpv2peer):
3126 raise error.Abort(_('could not instantiate HTTP peer for '
3126 raise error.Abort(_('could not instantiate HTTP peer for '
3127 'wire protocol version 2'),
3127 'wire protocol version 2'),
3128 hint=_('the server may not have the feature '
3128 hint=_('the server may not have the feature '
3129 'enabled or is not allowing this '
3129 'enabled or is not allowing this '
3130 'client version'))
3130 'client version'))
3131
3131
3132 elif opts['peer'] == 'raw':
3132 elif opts['peer'] == 'raw':
3133 ui.write(_('using raw connection to peer\n'))
3133 ui.write(_('using raw connection to peer\n'))
3134 peer = None
3134 peer = None
3135 elif opts['peer']:
3135 elif opts['peer']:
3136 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3136 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3137 opts['peer'])
3137 opts['peer'])
3138 else:
3138 else:
3139 peer = httppeer.makepeer(ui, path, opener=opener)
3139 peer = httppeer.makepeer(ui, path, opener=opener)
3140
3140
3141 # We /could/ populate stdin/stdout with sock.makefile()...
3141 # We /could/ populate stdin/stdout with sock.makefile()...
3142 else:
3142 else:
3143 raise error.Abort(_('unsupported connection configuration'))
3143 raise error.Abort(_('unsupported connection configuration'))
3144
3144
3145 batchedcommands = None
3145 batchedcommands = None
3146
3146
3147 # Now perform actions based on the parsed wire language instructions.
3147 # Now perform actions based on the parsed wire language instructions.
3148 for action, lines in blocks:
3148 for action, lines in blocks:
3149 if action in ('raw', 'raw+'):
3149 if action in ('raw', 'raw+'):
3150 if not stdin:
3150 if not stdin:
3151 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3151 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3152
3152
3153 # Concatenate the data together.
3153 # Concatenate the data together.
3154 data = ''.join(l.lstrip() for l in lines)
3154 data = ''.join(l.lstrip() for l in lines)
3155 data = stringutil.unescapestr(data)
3155 data = stringutil.unescapestr(data)
3156 stdin.write(data)
3156 stdin.write(data)
3157
3157
3158 if action == 'raw+':
3158 if action == 'raw+':
3159 stdin.flush()
3159 stdin.flush()
3160 elif action == 'flush':
3160 elif action == 'flush':
3161 if not stdin:
3161 if not stdin:
3162 raise error.Abort(_('cannot call flush on this peer'))
3162 raise error.Abort(_('cannot call flush on this peer'))
3163 stdin.flush()
3163 stdin.flush()
3164 elif action.startswith('command'):
3164 elif action.startswith('command'):
3165 if not peer:
3165 if not peer:
3166 raise error.Abort(_('cannot send commands unless peer instance '
3166 raise error.Abort(_('cannot send commands unless peer instance '
3167 'is available'))
3167 'is available'))
3168
3168
3169 command = action.split(' ', 1)[1]
3169 command = action.split(' ', 1)[1]
3170
3170
3171 args = {}
3171 args = {}
3172 for line in lines:
3172 for line in lines:
3173 # We need to allow empty values.
3173 # We need to allow empty values.
3174 fields = line.lstrip().split(' ', 1)
3174 fields = line.lstrip().split(' ', 1)
3175 if len(fields) == 1:
3175 if len(fields) == 1:
3176 key = fields[0]
3176 key = fields[0]
3177 value = ''
3177 value = ''
3178 else:
3178 else:
3179 key, value = fields
3179 key, value = fields
3180
3180
3181 if value.startswith('eval:'):
3181 if value.startswith('eval:'):
3182 value = stringutil.evalpythonliteral(value[5:])
3182 value = stringutil.evalpythonliteral(value[5:])
3183 else:
3183 else:
3184 value = stringutil.unescapestr(value)
3184 value = stringutil.unescapestr(value)
3185
3185
3186 args[key] = value
3186 args[key] = value
3187
3187
3188 if batchedcommands is not None:
3188 if batchedcommands is not None:
3189 batchedcommands.append((command, args))
3189 batchedcommands.append((command, args))
3190 continue
3190 continue
3191
3191
3192 ui.status(_('sending %s command\n') % command)
3192 ui.status(_('sending %s command\n') % command)
3193
3193
3194 if 'PUSHFILE' in args:
3194 if 'PUSHFILE' in args:
3195 with open(args['PUSHFILE'], r'rb') as fh:
3195 with open(args['PUSHFILE'], r'rb') as fh:
3196 del args['PUSHFILE']
3196 del args['PUSHFILE']
3197 res, output = peer._callpush(command, fh,
3197 res, output = peer._callpush(command, fh,
3198 **pycompat.strkwargs(args))
3198 **pycompat.strkwargs(args))
3199 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3199 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3200 ui.status(_('remote output: %s\n') %
3200 ui.status(_('remote output: %s\n') %
3201 stringutil.escapestr(output))
3201 stringutil.escapestr(output))
3202 else:
3202 else:
3203 with peer.commandexecutor() as e:
3203 with peer.commandexecutor() as e:
3204 res = e.callcommand(command, args).result()
3204 res = e.callcommand(command, args).result()
3205
3205
3206 if isinstance(res, wireprotov2peer.commandresponse):
3206 if isinstance(res, wireprotov2peer.commandresponse):
3207 val = list(res.cborobjects())
3207 val = list(res.cborobjects())
3208 ui.status(_('response: %s\n') %
3208 ui.status(_('response: %s\n') %
3209 stringutil.pprint(val, bprefix=True))
3209 stringutil.pprint(val, bprefix=True))
3210
3210
3211 else:
3211 else:
3212 ui.status(_('response: %s\n') %
3212 ui.status(_('response: %s\n') %
3213 stringutil.pprint(res, bprefix=True))
3213 stringutil.pprint(res, bprefix=True))
3214
3214
3215 elif action == 'batchbegin':
3215 elif action == 'batchbegin':
3216 if batchedcommands is not None:
3216 if batchedcommands is not None:
3217 raise error.Abort(_('nested batchbegin not allowed'))
3217 raise error.Abort(_('nested batchbegin not allowed'))
3218
3218
3219 batchedcommands = []
3219 batchedcommands = []
3220 elif action == 'batchsubmit':
3220 elif action == 'batchsubmit':
3221 # There is a batching API we could go through. But it would be
3221 # There is a batching API we could go through. But it would be
3222 # difficult to normalize requests into function calls. It is easier
3222 # difficult to normalize requests into function calls. It is easier
3223 # to bypass this layer and normalize to commands + args.
3223 # to bypass this layer and normalize to commands + args.
3224 ui.status(_('sending batch with %d sub-commands\n') %
3224 ui.status(_('sending batch with %d sub-commands\n') %
3225 len(batchedcommands))
3225 len(batchedcommands))
3226 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3226 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3227 ui.status(_('response #%d: %s\n') %
3227 ui.status(_('response #%d: %s\n') %
3228 (i, stringutil.escapestr(chunk)))
3228 (i, stringutil.escapestr(chunk)))
3229
3229
3230 batchedcommands = None
3230 batchedcommands = None
3231
3231
3232 elif action.startswith('httprequest '):
3232 elif action.startswith('httprequest '):
3233 if not opener:
3233 if not opener:
3234 raise error.Abort(_('cannot use httprequest without an HTTP '
3234 raise error.Abort(_('cannot use httprequest without an HTTP '
3235 'peer'))
3235 'peer'))
3236
3236
3237 request = action.split(' ', 2)
3237 request = action.split(' ', 2)
3238 if len(request) != 3:
3238 if len(request) != 3:
3239 raise error.Abort(_('invalid httprequest: expected format is '
3239 raise error.Abort(_('invalid httprequest: expected format is '
3240 '"httprequest <method> <path>'))
3240 '"httprequest <method> <path>'))
3241
3241
3242 method, httppath = request[1:]
3242 method, httppath = request[1:]
3243 headers = {}
3243 headers = {}
3244 body = None
3244 body = None
3245 frames = []
3245 frames = []
3246 for line in lines:
3246 for line in lines:
3247 line = line.lstrip()
3247 line = line.lstrip()
3248 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3248 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3249 if m:
3249 if m:
3250 headers[m.group(1)] = m.group(2)
3250 headers[m.group(1)] = m.group(2)
3251 continue
3251 continue
3252
3252
3253 if line.startswith(b'BODYFILE '):
3253 if line.startswith(b'BODYFILE '):
3254 with open(line.split(b' ', 1), 'rb') as fh:
3254 with open(line.split(b' ', 1), 'rb') as fh:
3255 body = fh.read()
3255 body = fh.read()
3256 elif line.startswith(b'frame '):
3256 elif line.startswith(b'frame '):
3257 frame = wireprotoframing.makeframefromhumanstring(
3257 frame = wireprotoframing.makeframefromhumanstring(
3258 line[len(b'frame '):])
3258 line[len(b'frame '):])
3259
3259
3260 frames.append(frame)
3260 frames.append(frame)
3261 else:
3261 else:
3262 raise error.Abort(_('unknown argument to httprequest: %s') %
3262 raise error.Abort(_('unknown argument to httprequest: %s') %
3263 line)
3263 line)
3264
3264
3265 url = path + httppath
3265 url = path + httppath
3266
3266
3267 if frames:
3267 if frames:
3268 body = b''.join(bytes(f) for f in frames)
3268 body = b''.join(bytes(f) for f in frames)
3269
3269
3270 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3270 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3271
3271
3272 # urllib.Request insists on using has_data() as a proxy for
3272 # urllib.Request insists on using has_data() as a proxy for
3273 # determining the request method. Override that to use our
3273 # determining the request method. Override that to use our
3274 # explicitly requested method.
3274 # explicitly requested method.
3275 req.get_method = lambda: pycompat.sysstr(method)
3275 req.get_method = lambda: pycompat.sysstr(method)
3276
3276
3277 try:
3277 try:
3278 res = opener.open(req)
3278 res = opener.open(req)
3279 body = res.read()
3279 body = res.read()
3280 except util.urlerr.urlerror as e:
3280 except util.urlerr.urlerror as e:
3281 # read() method must be called, but only exists in Python 2
3281 # read() method must be called, but only exists in Python 2
3282 getattr(e, 'read', lambda: None)()
3282 getattr(e, 'read', lambda: None)()
3283 continue
3283 continue
3284
3284
3285 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3285 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3286 ui.write(_('cbor> %s\n') %
3286 ui.write(_('cbor> %s\n') %
3287 stringutil.pprint(cbor.loads(body), bprefix=True))
3287 stringutil.pprint(cbor.loads(body), bprefix=True))
3288
3288
3289 elif action == 'close':
3289 elif action == 'close':
3290 peer.close()
3290 peer.close()
3291 elif action == 'readavailable':
3291 elif action == 'readavailable':
3292 if not stdout or not stderr:
3292 if not stdout or not stderr:
3293 raise error.Abort(_('readavailable not available on this peer'))
3293 raise error.Abort(_('readavailable not available on this peer'))
3294
3294
3295 stdin.close()
3295 stdin.close()
3296 stdout.read()
3296 stdout.read()
3297 stderr.read()
3297 stderr.read()
3298
3298
3299 elif action == 'readline':
3299 elif action == 'readline':
3300 if not stdout:
3300 if not stdout:
3301 raise error.Abort(_('readline not available on this peer'))
3301 raise error.Abort(_('readline not available on this peer'))
3302 stdout.readline()
3302 stdout.readline()
3303 elif action == 'ereadline':
3303 elif action == 'ereadline':
3304 if not stderr:
3304 if not stderr:
3305 raise error.Abort(_('ereadline not available on this peer'))
3305 raise error.Abort(_('ereadline not available on this peer'))
3306 stderr.readline()
3306 stderr.readline()
3307 elif action.startswith('read '):
3307 elif action.startswith('read '):
3308 count = int(action.split(' ', 1)[1])
3308 count = int(action.split(' ', 1)[1])
3309 if not stdout:
3309 if not stdout:
3310 raise error.Abort(_('read not available on this peer'))
3310 raise error.Abort(_('read not available on this peer'))
3311 stdout.read(count)
3311 stdout.read(count)
3312 elif action.startswith('eread '):
3312 elif action.startswith('eread '):
3313 count = int(action.split(' ', 1)[1])
3313 count = int(action.split(' ', 1)[1])
3314 if not stderr:
3314 if not stderr:
3315 raise error.Abort(_('eread not available on this peer'))
3315 raise error.Abort(_('eread not available on this peer'))
3316 stderr.read(count)
3316 stderr.read(count)
3317 else:
3317 else:
3318 raise error.Abort(_('unknown action: %s') % action)
3318 raise error.Abort(_('unknown action: %s') % action)
3319
3319
3320 if batchedcommands is not None:
3320 if batchedcommands is not None:
3321 raise error.Abort(_('unclosed "batchbegin" request'))
3321 raise error.Abort(_('unclosed "batchbegin" request'))
3322
3322
3323 if peer:
3323 if peer:
3324 peer.close()
3324 peer.close()
3325
3325
3326 if proc:
3326 if proc:
3327 proc.kill()
3327 proc.kill()
@@ -1,274 +1,284 b''
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
1 # setdiscovery.py - improved discovery of common nodeset for mercurial
2 #
2 #
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
3 # Copyright 2010 Benoit Boissinot <bboissin@gmail.com>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
4 # and Peter Arrenbrecht <peter@arrenbrecht.ch>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8 """
8 """
9 The algorithm works in the following way. You have two repositories: local and
9 The algorithm works in the following way. You have two repositories: local and
10 remote. They both contain a DAG of changelists.
10 remote. They both contain a DAG of changelists.
11
11
12 The goal of the discovery protocol is to find one set of node *common*,
12 The goal of the discovery protocol is to find one set of node *common*,
13 the set of nodes shared by local and remote.
13 the set of nodes shared by local and remote.
14
14
15 One of the issues with the original protocol was latency: it could
15 One of the issues with the original protocol was latency: it could
16 potentially require lots of roundtrips to discover that the local repo was a
16 potentially require lots of roundtrips to discover that the local repo was a
17 subset of remote (which is a very common case, you usually have few changes
17 subset of remote (which is a very common case, you usually have few changes
18 compared to upstream, while upstream probably had lots of development).
18 compared to upstream, while upstream probably had lots of development).
19
19
20 The new protocol only requires one interface for the remote repo: `known()`,
20 The new protocol only requires one interface for the remote repo: `known()`,
21 which given a set of changelists tells you if they are present in the DAG.
21 which given a set of changelists tells you if they are present in the DAG.
22
22
23 The algorithm then works as follow:
23 The algorithm then works as follow:
24
24
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
25 - We will be using three sets, `common`, `missing`, `unknown`. Originally
26 all nodes are in `unknown`.
26 all nodes are in `unknown`.
27 - Take a sample from `unknown`, call `remote.known(sample)`
27 - Take a sample from `unknown`, call `remote.known(sample)`
28 - For each node that remote knows, move it and all its ancestors to `common`
28 - For each node that remote knows, move it and all its ancestors to `common`
29 - For each node that remote doesn't know, move it and all its descendants
29 - For each node that remote doesn't know, move it and all its descendants
30 to `missing`
30 to `missing`
31 - Iterate until `unknown` is empty
31 - Iterate until `unknown` is empty
32
32
33 There are a couple optimizations, first is instead of starting with a random
33 There are a couple optimizations, first is instead of starting with a random
34 sample of missing, start by sending all heads, in the case where the local
34 sample of missing, start by sending all heads, in the case where the local
35 repo is a subset, you computed the answer in one round trip.
35 repo is a subset, you computed the answer in one round trip.
36
36
37 Then you can do something similar to the bisecting strategy used when
37 Then you can do something similar to the bisecting strategy used when
38 finding faulty changesets. Instead of random samples, you can try picking
38 finding faulty changesets. Instead of random samples, you can try picking
39 nodes that will maximize the number of nodes that will be
39 nodes that will maximize the number of nodes that will be
40 classified with it (since all ancestors or descendants will be marked as well).
40 classified with it (since all ancestors or descendants will be marked as well).
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import collections
45 import collections
46 import random
46 import random
47
47
48 from .i18n import _
48 from .i18n import _
49 from .node import (
49 from .node import (
50 nullid,
50 nullid,
51 nullrev,
51 nullrev,
52 )
52 )
53 from . import (
53 from . import (
54 dagutil,
54 dagutil,
55 error,
55 error,
56 util,
56 util,
57 )
57 )
58
58
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
59 def _updatesample(dag, nodes, sample, quicksamplesize=0):
60 """update an existing sample to match the expected size
60 """update an existing sample to match the expected size
61
61
62 The sample is updated with nodes exponentially distant from each head of the
62 The sample is updated with nodes exponentially distant from each head of the
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
63 <nodes> set. (H~1, H~2, H~4, H~8, etc).
64
64
65 If a target size is specified, the sampling will stop once this size is
65 If a target size is specified, the sampling will stop once this size is
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
66 reached. Otherwise sampling will happen until roots of the <nodes> set are
67 reached.
67 reached.
68
68
69 :dag: a dag object from dagutil
69 :dag: a dag object from dagutil
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
70 :nodes: set of nodes we want to discover (if None, assume the whole dag)
71 :sample: a sample to update
71 :sample: a sample to update
72 :quicksamplesize: optional target size of the sample"""
72 :quicksamplesize: optional target size of the sample"""
73 # if nodes is empty we scan the entire graph
73 # if nodes is empty we scan the entire graph
74 if nodes:
74 if nodes:
75 heads = dag.headsetofconnecteds(nodes)
75 heads = dag.headsetofconnecteds(nodes)
76 else:
76 else:
77 heads = dag.heads()
77 heads = dag.heads()
78 dist = {}
78 dist = {}
79 visit = collections.deque(heads)
79 visit = collections.deque(heads)
80 seen = set()
80 seen = set()
81 factor = 1
81 factor = 1
82 while visit:
82 while visit:
83 curr = visit.popleft()
83 curr = visit.popleft()
84 if curr in seen:
84 if curr in seen:
85 continue
85 continue
86 d = dist.setdefault(curr, 1)
86 d = dist.setdefault(curr, 1)
87 if d > factor:
87 if d > factor:
88 factor *= 2
88 factor *= 2
89 if d == factor:
89 if d == factor:
90 sample.add(curr)
90 sample.add(curr)
91 if quicksamplesize and (len(sample) >= quicksamplesize):
91 if quicksamplesize and (len(sample) >= quicksamplesize):
92 return
92 return
93 seen.add(curr)
93 seen.add(curr)
94 for p in dag.parents(curr):
94 for p in dag.parents(curr):
95 if not nodes or p in nodes:
95 if not nodes or p in nodes:
96 dist.setdefault(p, d + 1)
96 dist.setdefault(p, d + 1)
97 visit.append(p)
97 visit.append(p)
98
98
def _takequicksample(dag, nodes, size):
    """Return a cheap sample of at most <size> nodes.

    Meant for the initial discovery query: it favors the heads of the
    <nodes> set and, when those are not numerous enough, close ancestors
    of heads.

    :dag: a dag object
    :nodes: set of nodes to discover
    :size: the maximum size of the sample"""
    sample = dag.headsetofconnecteds(nodes)
    if len(sample) < size:
        # not enough heads: pad with exponentially spaced ancestors
        _updatesample(dag, None, sample, quicksamplesize=size)
        return sample
    return _limitsample(sample, size)
113
113
def _takefullsample(dag, nodes, size):
    """Return a sample of at most <size> nodes drawn from both directions.

    The sample starts with the heads of <nodes>, is enriched with nodes
    exponentially distant from the heads and (via the inverted graph)
    from the roots, and is finally padded with random undecided nodes if
    there is still room."""
    sample = dag.headsetofconnecteds(nodes)
    # enrich with nodes reached walking down from the heads...
    _updatesample(dag, nodes, sample)
    # ...and walking up from the roots, using the inverted graph
    _updatesample(dag.inverse(), nodes, sample)
    assert sample
    sample = _limitsample(sample, size)
    shortfall = size - len(sample)
    if shortfall > 0:
        sample.update(random.sample(list(nodes - sample), shortfall))
    return sample
126
126
127 def _limitsample(sample, desiredlen):
127 def _limitsample(sample, desiredlen):
128 """return a random subset of sample of at most desiredlen item"""
128 """return a random subset of sample of at most desiredlen item"""
129 if len(sample) > desiredlen:
129 if len(sample) > desiredlen:
130 sample = set(random.sample(sample, desiredlen))
130 sample = set(random.sample(sample, desiredlen))
131 return sample
131 return sample
132
132
def findcommonheads(ui, local, remote,
                    initialsamplesize=100,
                    fullsamplesize=200,
                    abortwhenunrelated=True,
                    ancestorsof=None):
    '''Return a tuple (common, anyincoming, remoteheads) used to identify
    missing nodes from or in remote.

    'common' holds nodes known to exist on both sides, 'anyincoming' is
    True when the remote appears to have changesets at all (its heads are
    not just nullid), and 'remoteheads' is the raw list of head nodes
    reported by the remote.  Raises error.Abort when the repositories
    share no history and abortwhenunrelated is set.
    '''
    start = util.timer()

    roundtrips = 0
    cl = local.changelog
    # rev <-> node conversion helpers for the local changelog
    clnode = cl.node
    clrev = cl.rev
    localsubset = None

    if ancestorsof is not None:
        # restrict discovery to the ancestors of the requested nodes
        localsubset = [clrev(n) for n in ancestorsof]
    dag = dagutil.revlogdag(cl, localsubset=localsubset)

    # early exit if we know all the specified remote heads already
    ui.debug("query 1; heads\n")
    roundtrips += 1
    ownheads = dag.heads()
    sample = _limitsample(ownheads, initialsamplesize)
    # indices between sample and externalized version must match
    sample = list(sample)

    # first round trip: ask for the remote heads and whether it knows our
    # (sampled) heads, batched in a single request
    with remote.commandexecutor() as e:
        fheads = e.callcommand('heads', {})
        fknown = e.callcommand('known', {
            'nodes': [clnode(r) for r in sample],
        })

    srvheadhashes, yesno = fheads.result(), fknown.result()

    # local repository is empty: everything the remote has is incoming
    if cl.tip() == nullid:
        if srvheadhashes != [nullid]:
            return [nullid], True, srvheadhashes
        return [nullid], False, []

    # start actual discovery (we note this before the next "if" for
    # compatibility reasons)
    ui.status(_("searching for changes\n"))

    # map the remote's head nodes to local revs, silently dropping heads
    # the local repository does not have (or has filtered)
    srvheads = []
    for node in srvheadhashes:
        if node == nullid:
            continue

        try:
            srvheads.append(clrev(node))
        # Catches unknown and filtered nodes.
        except error.LookupError:
            continue

    # every remote head resolved locally: nothing is incoming
    if len(srvheads) == len(srvheadhashes):
        ui.debug("all remote heads known locally\n")
        return srvheadhashes, False, srvheadhashes

    # the remote knows every one of our heads: nothing is outgoing
    if len(sample) == len(ownheads) and all(yesno):
        ui.note(_("all local heads known remotely\n"))
        ownheadhashes = [clnode(r) for r in ownheads]
        return ownheadhashes, True, srvheadhashes

    # full blown discovery

    # own nodes I know we both know
    # treat remote heads (and maybe own heads) as a first implicit sample
    # response
    common = cl.incrementalmissingrevs(srvheads)
    commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
    common.addbases(commoninsample)
    # own nodes where I don't know if remote knows them
    undecided = set(common.missingancestors(ownheads))
    # own nodes I know remote lacks
    missing = set()

    full = False
    progress = ui.makeprogress(_('searching'), unit=_('queries'))
    while undecided:

        if sample:
            # nodes the remote denied knowing, plus all their descendants,
            # are definitely missing on the remote
            missinginsample = [n for i, n in enumerate(sample) if not yesno[i]]
            missing.update(dag.descendantset(missinginsample, missing))

            undecided.difference_update(missing)

        if not undecided:
            break

        if full or common.hasbases():
            if full:
                ui.note(_("sampling from both directions\n"))
            else:
                ui.debug("taking initial sample\n")
            samplefunc = _takefullsample
            targetsize = fullsamplesize
        else:
            # use even cheaper initial sample
            ui.debug("taking quick initial sample\n")
            samplefunc = _takequicksample
            targetsize = initialsamplesize
        if len(undecided) < targetsize:
            # few enough undecided nodes to just query them all
            sample = list(undecided)
        else:
            sample = samplefunc(dag, undecided, targetsize)

        roundtrips += 1
        progress.update(roundtrips)
        ui.debug("query %i; still undecided: %i, sample size is: %i\n"
                 % (roundtrips, len(undecided), len(sample)))
        # indices between sample and externalized version must match
        sample = list(sample)

        with remote.commandexecutor() as e:
            yesno = e.callcommand('known', {
                'nodes': [clnode(r) for r in sample],
            }).result()

        full = True

        if sample:
            commoninsample = set(n for i, n in enumerate(sample) if yesno[i])
            common.addbases(commoninsample)
            common.removeancestorsfrom(undecided)

    # heads(common) == heads(common.bases) since common represents common.bases
    # and all its ancestors
    result = dag.headsetofconnecteds(common.bases)
    # common.bases can include nullrev, but our contract requires us to not
    # return any heads in that case, so discard that
    result.discard(nullrev)
    elapsed = util.timer() - start
    progress.complete()
    ui.debug("%d total queries in %.4fs\n" % (roundtrips, elapsed))
    msg = ('found %d common and %d unknown server heads,'
           ' %d roundtrips in %.4fs\n')
    missing = set(result) - set(srvheads)
    ui.log('discovery', msg, len(result), len(missing), roundtrips,
           elapsed)

    if not result and srvheadhashes != [nullid]:
        # no common head found although the remote is non-empty: the
        # repositories are unrelated
        if abortwhenunrelated:
            raise error.Abort(_("repository is unrelated"))
        else:
            ui.warn(_("warning: repository is unrelated\n"))
        return ({nullid}, True, srvheadhashes,)

    anyincoming = (srvheadhashes != [nullid])
    # convert the common head revs back to nodes for the caller
    result = {clnode(r) for r in result}
    return result, anyincoming, srvheadhashes
General Comments 0
You need to be logged in to leave comments. Login now