##// END OF EJS Templates
debugdiscovery: drop duplicated information...
marmoute -
r42200:c3a16c28 default
parent child Browse files
Show More
@@ -1,3465 +1,3461 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 copies,
41 copies,
42 dagparser,
42 dagparser,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 filesetlang,
48 filesetlang,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 httppeer,
52 localrepo,
52 localrepo,
53 lock as lockmod,
53 lock as lockmod,
54 logcmdutil,
54 logcmdutil,
55 merge as mergemod,
55 merge as mergemod,
56 obsolete,
56 obsolete,
57 obsutil,
57 obsutil,
58 phases,
58 phases,
59 policy,
59 policy,
60 pvec,
60 pvec,
61 pycompat,
61 pycompat,
62 registrar,
62 registrar,
63 repair,
63 repair,
64 revlog,
64 revlog,
65 revset,
65 revset,
66 revsetlang,
66 revsetlang,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshpeer,
70 sshpeer,
71 sslutil,
71 sslutil,
72 streamclone,
72 streamclone,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 upgrade,
75 upgrade,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 vfs as vfsmod,
78 vfs as vfsmod,
79 wireprotoframing,
79 wireprotoframing,
80 wireprotoserver,
80 wireprotoserver,
81 wireprotov2peer,
81 wireprotov2peer,
82 )
82 )
83 from .utils import (
83 from .utils import (
84 cborutil,
84 cborutil,
85 dateutil,
85 dateutil,
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89
89
90 from .revlogutils import (
90 from .revlogutils import (
91 deltas as deltautil
91 deltas as deltautil
92 )
92 )
93
93
94 release = lockmod.release
94 release = lockmod.release
95
95
96 command = registrar.command()
96 command = registrar.command()
97
97
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit index file given: open it as a standalone revlog
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif nargs == 2:
        # no index file: fall back to the current repository's changelog
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
116
116
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle (handles local paths and URLs alike)
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
123
123
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # refuse to run on a non-empty repo: revision numbers in the DAG text
    # are assumed to start at 0
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # first pass over the DAG text: determine number of revs, so the
    # progress bar and the mergeable file can be sized up front
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # hold both locks plus a transaction for the whole build
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                 # rev number of the last node committed
        atbranch = 'default'    # branch for subsequently created nodes
        nodeids = []            # rev number -> node id, for backrefs
        id = 0
        progress.update(id)
        # second pass: actually create the commits / tags / branch changes
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n': create a new node (commit)
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # "mf" holds content designed to merge cleanly line-wise
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge node: three-way merge the parents' copies
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # tag this rev's own slice of lines with its rev number
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # "of" is rewritten wholesale by every rev
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per rev; merges also carry over the
                    # second parent's "nf*" files so they are not lost
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # supply file content from the dict built above
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve backrefs (indices into nodeids) into parent nodes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l': record a local tag for the given rev
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a': switch the branch used for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        # write collected tags as local (non-versioned) tags
        repo.vfs.write("localtags", "".join(tags))
271
271
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of changegroup 'gen'

    With 'all', prints one detailed line per delta (node, parents, cset,
    delta base, delta length); otherwise only node hashes of the changelog
    entries. 'indent' prefixes every output line (used when nested inside
    bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # print a section header, then one line per delta in the
            # current changegroup section; consumes gen.deltaiter()
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # sections must be consumed in stream order:
        # changelog, manifest, then one section per file
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        # terse mode only knows how to walk a plain changegroup stream
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
300
300
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # unknown encoding version: report it rather than crash
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
323
323
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads encoded in 'data', one per line"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phasename))
332
332
def _quasirepr(thing):
    """repr()-like rendering with deterministic (sorted) dict ordering"""
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
338
338
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part types, recurse into the payload (unless --quiet)
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
361
361
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as fh:
        if spec:
            # only report the bundlespec, not the contents
            spec = exchange.getbundlespec(ui, fh)
            ui.write('%s\n' % spec)
            return

        unbundler = exchange.readbundle(ui, fh, bundlepath)
        # dispatch on container format: bundle2 vs plain changegroup
        if isinstance(unbundler, bundle2.unbundle20):
            return _debugbundle2(ui, unbundler, all=all, **opts)
        _debugchangegroup(ui, unbundler, all=all, **opts)
380
380
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # wire-protocol level capabilities first
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # then the bundle2 capability tree, if the peer advertises one
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capkey, capvalues in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % capkey)
            for value in capvalues:
                ui.write((' %s\n') % value)
399
399
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents, warns about each inconsistency found, and aborts
    if there was at least one.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # 'n'ormal / 'r'emoved entries must exist in the first manifest
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # 'a'dded entries must NOT already exist in the first manifest
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # 'm'erged entries must come from at least one parent manifest
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # reverse check: every manifest file must be tracked by the dirstate
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: do not name this local 'error' -- that would shadow the
        # imported 'error' module and break the Abort call below
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
427
427
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects
    if not opts.get(r'style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
438
438
def _debugdisplaycolor(ui):
    """print every available color/effect name, rendered in itself"""
    # work on a copy so the caller's ui styles are left untouched
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose custom color./terminfo. definitions
        for key, value in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    sortkey = lambda i: ('_' in i[0], i[0], i[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
456
456
def _debugdisplaystyle(ui):
    """print each configured style label with its effects, aligned"""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # pad effect lists so they line up after the longest label
    width = max(len(label) for label in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
470
470
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
488
488
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # standalone revlog index given on the command line
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield 'n' (node) events, plus 'l' (label) events for the
            # revs the user asked to highlight
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no index file: walk the repository changelog
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event on branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    # serialize the event stream back into debugbuilddag's text format
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
551
551
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # with -c/-m/--dir the storage is implied, so the first positional
        # argument is actually the revision, not a file
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        # without -c/-m/--dir both FILE and REV are required
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit the stored bytes without flag processing
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
567
567
@command('debugdate',
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]'),
         norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # with --extended, also accept the less common date formats
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # report whether the parsed timestamp falls inside RANGE
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
583
583
@command('debugdeltachain',
         cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # summarize one revision: compressed/uncompressed sizes, how its
        # delta was chosen, and the full chain it depends on.
        # index entries are tuples; e[1]=compressed size, e[2]=uncompressed
        # size, e[3]=delta base rev, e[5]/e[6]=parent revs (per usage below)
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base can be either parent, the
            # previous rev, the rev itself (full snapshot), or any other rev
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta a delta is always against the previous
            # revision unless this rev is its own base (full snapshot)
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # column headers are hand-padded to line up with the fixed-width
    # format strings used below
    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # assign a small integer id to each distinct chain base, in first-seen
    # order, so chains are easy to tell apart in the output
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        # bytes between the chain base's start and this rev's end on disk
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length 1: this rev is its own base
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # simulate a sparse read of the chain and measure how much
            # data would actually be fetched, in how many hunks
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
735
735
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
     ('', 'dates', True, _('display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    # --nodates is the deprecated spelling; it overrides --dates
    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry is a tuple indexed below as: [0] state char,
    # [1] mode bits, [2] size, [3] mtime (per how the fields are used here)
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # padded to the width of the strftime output below
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the stored mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
769
769
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     ('', 'rev', [], 'restrict discovery to this set of revs'),
     ('', 'seed', '12323', 'specify the random seed use for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if opts.get('old'):
        # legacy tree-walking discovery
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # reduce the raw common set to its heads before reporting
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        # modern set-based discovery
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # middle return value is unused here; renamed from 'any' to
            # avoid shadowing the builtin
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("heads summary:\n"))
    ui.write(("  total common heads:  %(nb-common)9d\n") % data)
    ui.write(("    also local heads:  %(nb-common-local)9d\n") % data)
    ui.write(("    also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write(("  local heads:         %(nb-local)9d\n") % data)
    ui.write(("    common:            %(nb-common-local)9d\n") % data)
    ui.write(("    missing:           %(nb-local-missing)9d\n") % data)
    ui.write(("  remote heads:        %(nb-remote)9d\n") % data)
    ui.write(("    common:            %(nb-common-remote)9d\n") % data)
    ui.write(("    unknown:           %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets:      %(nb-revs)9d\n") % data)
    ui.write(("    common:            %(nb-revs-common)9d\n") % data)
    ui.write(("    missing:           %(nb-revs-missing)9d\n") % data)

    ui.write(("common heads: %s\n") %
             " ".join(sorted(short(n) for n in common)))

# read/write buffer size (4 KiB) for debugdownload
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()
        # close the source handle too; previously it was leaked
        fh.close()
882
878
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    # iterate extensions sorted by name for stable output
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with the compatibility status derived
            # from the extension's 'testedwith' declaration
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
928
924
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # compilation pipeline: each stage transforms the parse tree
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    # set comprehension instead of set(generator): same result, clearer
    stagenames = {n for n, f in stages}

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted for the implicit --verbose case
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
995
991
996 @command('debugformat',
992 @command('debugformat',
997 [] + cmdutil.formatteropts)
993 [] + cmdutil.formatteropts)
998 def debugformat(ui, repo, **opts):
994 def debugformat(ui, repo, **opts):
999 """display format information about the current repository
995 """display format information about the current repository
1000
996
1001 Use --verbose to get extra information about current config value and
997 Use --verbose to get extra information about current config value and
1002 Mercurial default."""
998 Mercurial default."""
1003 opts = pycompat.byteskwargs(opts)
999 opts = pycompat.byteskwargs(opts)
1004 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1000 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1005 maxvariantlength = max(len('format-variant'), maxvariantlength)
1001 maxvariantlength = max(len('format-variant'), maxvariantlength)
1006
1002
1007 def makeformatname(name):
1003 def makeformatname(name):
1008 return '%s:' + (' ' * (maxvariantlength - len(name)))
1004 return '%s:' + (' ' * (maxvariantlength - len(name)))
1009
1005
1010 fm = ui.formatter('debugformat', opts)
1006 fm = ui.formatter('debugformat', opts)
1011 if fm.isplain():
1007 if fm.isplain():
1012 def formatvalue(value):
1008 def formatvalue(value):
1013 if util.safehasattr(value, 'startswith'):
1009 if util.safehasattr(value, 'startswith'):
1014 return value
1010 return value
1015 if value:
1011 if value:
1016 return 'yes'
1012 return 'yes'
1017 else:
1013 else:
1018 return 'no'
1014 return 'no'
1019 else:
1015 else:
1020 formatvalue = pycompat.identity
1016 formatvalue = pycompat.identity
1021
1017
1022 fm.plain('format-variant')
1018 fm.plain('format-variant')
1023 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1019 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1024 fm.plain(' repo')
1020 fm.plain(' repo')
1025 if ui.verbose:
1021 if ui.verbose:
1026 fm.plain(' config default')
1022 fm.plain(' config default')
1027 fm.plain('\n')
1023 fm.plain('\n')
1028 for fv in upgrade.allformatvariant:
1024 for fv in upgrade.allformatvariant:
1029 fm.startitem()
1025 fm.startitem()
1030 repovalue = fv.fromrepo(repo)
1026 repovalue = fv.fromrepo(repo)
1031 configvalue = fv.fromconfig(repo)
1027 configvalue = fv.fromconfig(repo)
1032
1028
1033 if repovalue != configvalue:
1029 if repovalue != configvalue:
1034 namelabel = 'formatvariant.name.mismatchconfig'
1030 namelabel = 'formatvariant.name.mismatchconfig'
1035 repolabel = 'formatvariant.repo.mismatchconfig'
1031 repolabel = 'formatvariant.repo.mismatchconfig'
1036 elif repovalue != fv.default:
1032 elif repovalue != fv.default:
1037 namelabel = 'formatvariant.name.mismatchdefault'
1033 namelabel = 'formatvariant.name.mismatchdefault'
1038 repolabel = 'formatvariant.repo.mismatchdefault'
1034 repolabel = 'formatvariant.repo.mismatchdefault'
1039 else:
1035 else:
1040 namelabel = 'formatvariant.name.uptodate'
1036 namelabel = 'formatvariant.name.uptodate'
1041 repolabel = 'formatvariant.repo.uptodate'
1037 repolabel = 'formatvariant.repo.uptodate'
1042
1038
1043 fm.write('name', makeformatname(fv.name), fv.name,
1039 fm.write('name', makeformatname(fv.name), fv.name,
1044 label=namelabel)
1040 label=namelabel)
1045 fm.write('repo', ' %3s', formatvalue(repovalue),
1041 fm.write('repo', ' %3s', formatvalue(repovalue),
1046 label=repolabel)
1042 label=repolabel)
1047 if fv.default != configvalue:
1043 if fv.default != configvalue:
1048 configlabel = 'formatvariant.config.special'
1044 configlabel = 'formatvariant.config.special'
1049 else:
1045 else:
1050 configlabel = 'formatvariant.config.default'
1046 configlabel = 'formatvariant.config.default'
1051 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1047 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1052 label=configlabel)
1048 label=configlabel)
1053 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1049 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1054 label='formatvariant.default')
1050 label='formatvariant.default')
1055 fm.plain('\n')
1051 fm.plain('\n')
1056 fm.end()
1052 fm.end()
1057
1053
1058 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1054 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1059 def debugfsinfo(ui, path="."):
1055 def debugfsinfo(ui, path="."):
1060 """show information detected about current filesystem"""
1056 """show information detected about current filesystem"""
1061 ui.write(('path: %s\n') % path)
1057 ui.write(('path: %s\n') % path)
1062 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1058 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1063 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1059 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1064 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1060 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1065 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1061 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1066 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1062 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1067 casesensitive = '(unknown)'
1063 casesensitive = '(unknown)'
1068 try:
1064 try:
1069 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1065 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1070 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1066 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1071 except OSError:
1067 except OSError:
1072 pass
1068 pass
1073 ui.write(('case-sensitive: %s\n') % casesensitive)
1069 ui.write(('case-sensitive: %s\n') % casesensitive)
1074
1070
1075 @command('debuggetbundle',
1071 @command('debuggetbundle',
1076 [('H', 'head', [], _('id of head node'), _('ID')),
1072 [('H', 'head', [], _('id of head node'), _('ID')),
1077 ('C', 'common', [], _('id of common node'), _('ID')),
1073 ('C', 'common', [], _('id of common node'), _('ID')),
1078 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1074 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1079 _('REPO FILE [-H|-C ID]...'),
1075 _('REPO FILE [-H|-C ID]...'),
1080 norepo=True)
1076 norepo=True)
1081 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1077 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1082 """retrieves a bundle from a repo
1078 """retrieves a bundle from a repo
1083
1079
1084 Every ID must be a full-length hex node id string. Saves the bundle to the
1080 Every ID must be a full-length hex node id string. Saves the bundle to the
1085 given file.
1081 given file.
1086 """
1082 """
1087 opts = pycompat.byteskwargs(opts)
1083 opts = pycompat.byteskwargs(opts)
1088 repo = hg.peer(ui, opts, repopath)
1084 repo = hg.peer(ui, opts, repopath)
1089 if not repo.capable('getbundle'):
1085 if not repo.capable('getbundle'):
1090 raise error.Abort("getbundle() not supported by target repository")
1086 raise error.Abort("getbundle() not supported by target repository")
1091 args = {}
1087 args = {}
1092 if common:
1088 if common:
1093 args[r'common'] = [bin(s) for s in common]
1089 args[r'common'] = [bin(s) for s in common]
1094 if head:
1090 if head:
1095 args[r'heads'] = [bin(s) for s in head]
1091 args[r'heads'] = [bin(s) for s in head]
1096 # TODO: get desired bundlecaps from command line.
1092 # TODO: get desired bundlecaps from command line.
1097 args[r'bundlecaps'] = None
1093 args[r'bundlecaps'] = None
1098 bundle = repo.getbundle('debug', **args)
1094 bundle = repo.getbundle('debug', **args)
1099
1095
1100 bundletype = opts.get('type', 'bzip2').lower()
1096 bundletype = opts.get('type', 'bzip2').lower()
1101 btypes = {'none': 'HG10UN',
1097 btypes = {'none': 'HG10UN',
1102 'bzip2': 'HG10BZ',
1098 'bzip2': 'HG10BZ',
1103 'gzip': 'HG10GZ',
1099 'gzip': 'HG10GZ',
1104 'bundle2': 'HG20'}
1100 'bundle2': 'HG20'}
1105 bundletype = btypes.get(bundletype)
1101 bundletype = btypes.get(bundletype)
1106 if bundletype not in bundle2.bundletypes:
1102 if bundletype not in bundle2.bundletypes:
1107 raise error.Abort(_('unknown bundle type specified with --type'))
1103 raise error.Abort(_('unknown bundle type specified with --type'))
1108 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1104 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1109
1105
1110 @command('debugignore', [], '[FILE]')
1106 @command('debugignore', [], '[FILE]')
1111 def debugignore(ui, repo, *files, **opts):
1107 def debugignore(ui, repo, *files, **opts):
1112 """display the combined ignore pattern and information about ignored files
1108 """display the combined ignore pattern and information about ignored files
1113
1109
1114 With no argument display the combined ignore pattern.
1110 With no argument display the combined ignore pattern.
1115
1111
1116 Given space separated file names, shows if the given file is ignored and
1112 Given space separated file names, shows if the given file is ignored and
1117 if so, show the ignore rule (file and line number) that matched it.
1113 if so, show the ignore rule (file and line number) that matched it.
1118 """
1114 """
1119 ignore = repo.dirstate._ignore
1115 ignore = repo.dirstate._ignore
1120 if not files:
1116 if not files:
1121 # Show all the patterns
1117 # Show all the patterns
1122 ui.write("%s\n" % pycompat.byterepr(ignore))
1118 ui.write("%s\n" % pycompat.byterepr(ignore))
1123 else:
1119 else:
1124 m = scmutil.match(repo[None], pats=files)
1120 m = scmutil.match(repo[None], pats=files)
1125 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1121 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1126 for f in m.files():
1122 for f in m.files():
1127 nf = util.normpath(f)
1123 nf = util.normpath(f)
1128 ignored = None
1124 ignored = None
1129 ignoredata = None
1125 ignoredata = None
1130 if nf != '.':
1126 if nf != '.':
1131 if ignore(nf):
1127 if ignore(nf):
1132 ignored = nf
1128 ignored = nf
1133 ignoredata = repo.dirstate._ignorefileandline(nf)
1129 ignoredata = repo.dirstate._ignorefileandline(nf)
1134 else:
1130 else:
1135 for p in util.finddirs(nf):
1131 for p in util.finddirs(nf):
1136 if ignore(p):
1132 if ignore(p):
1137 ignored = p
1133 ignored = p
1138 ignoredata = repo.dirstate._ignorefileandline(p)
1134 ignoredata = repo.dirstate._ignorefileandline(p)
1139 break
1135 break
1140 if ignored:
1136 if ignored:
1141 if ignored == nf:
1137 if ignored == nf:
1142 ui.write(_("%s is ignored\n") % uipathfn(f))
1138 ui.write(_("%s is ignored\n") % uipathfn(f))
1143 else:
1139 else:
1144 ui.write(_("%s is ignored because of "
1140 ui.write(_("%s is ignored because of "
1145 "containing folder %s\n")
1141 "containing folder %s\n")
1146 % (uipathfn(f), ignored))
1142 % (uipathfn(f), ignored))
1147 ignorefile, lineno, line = ignoredata
1143 ignorefile, lineno, line = ignoredata
1148 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1144 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1149 % (ignorefile, lineno, line))
1145 % (ignorefile, lineno, line))
1150 else:
1146 else:
1151 ui.write(_("%s is not ignored\n") % uipathfn(f))
1147 ui.write(_("%s is not ignored\n") % uipathfn(f))
1152
1148
1153 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1149 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1154 _('-c|-m|FILE'))
1150 _('-c|-m|FILE'))
1155 def debugindex(ui, repo, file_=None, **opts):
1151 def debugindex(ui, repo, file_=None, **opts):
1156 """dump index data for a storage primitive"""
1152 """dump index data for a storage primitive"""
1157 opts = pycompat.byteskwargs(opts)
1153 opts = pycompat.byteskwargs(opts)
1158 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1154 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1159
1155
1160 if ui.debugflag:
1156 if ui.debugflag:
1161 shortfn = hex
1157 shortfn = hex
1162 else:
1158 else:
1163 shortfn = short
1159 shortfn = short
1164
1160
1165 idlen = 12
1161 idlen = 12
1166 for i in store:
1162 for i in store:
1167 idlen = len(shortfn(store.node(i)))
1163 idlen = len(shortfn(store.node(i)))
1168 break
1164 break
1169
1165
1170 fm = ui.formatter('debugindex', opts)
1166 fm = ui.formatter('debugindex', opts)
1171 fm.plain(b' rev linkrev %s %s p2\n' % (
1167 fm.plain(b' rev linkrev %s %s p2\n' % (
1172 b'nodeid'.ljust(idlen),
1168 b'nodeid'.ljust(idlen),
1173 b'p1'.ljust(idlen)))
1169 b'p1'.ljust(idlen)))
1174
1170
1175 for rev in store:
1171 for rev in store:
1176 node = store.node(rev)
1172 node = store.node(rev)
1177 parents = store.parents(node)
1173 parents = store.parents(node)
1178
1174
1179 fm.startitem()
1175 fm.startitem()
1180 fm.write(b'rev', b'%6d ', rev)
1176 fm.write(b'rev', b'%6d ', rev)
1181 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1177 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1182 fm.write(b'node', '%s ', shortfn(node))
1178 fm.write(b'node', '%s ', shortfn(node))
1183 fm.write(b'p1', '%s ', shortfn(parents[0]))
1179 fm.write(b'p1', '%s ', shortfn(parents[0]))
1184 fm.write(b'p2', '%s', shortfn(parents[1]))
1180 fm.write(b'p2', '%s', shortfn(parents[1]))
1185 fm.plain(b'\n')
1181 fm.plain(b'\n')
1186
1182
1187 fm.end()
1183 fm.end()
1188
1184
1189 @command('debugindexdot', cmdutil.debugrevlogopts,
1185 @command('debugindexdot', cmdutil.debugrevlogopts,
1190 _('-c|-m|FILE'), optionalrepo=True)
1186 _('-c|-m|FILE'), optionalrepo=True)
1191 def debugindexdot(ui, repo, file_=None, **opts):
1187 def debugindexdot(ui, repo, file_=None, **opts):
1192 """dump an index DAG as a graphviz dot file"""
1188 """dump an index DAG as a graphviz dot file"""
1193 opts = pycompat.byteskwargs(opts)
1189 opts = pycompat.byteskwargs(opts)
1194 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1190 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1195 ui.write(("digraph G {\n"))
1191 ui.write(("digraph G {\n"))
1196 for i in r:
1192 for i in r:
1197 node = r.node(i)
1193 node = r.node(i)
1198 pp = r.parents(node)
1194 pp = r.parents(node)
1199 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1195 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1200 if pp[1] != nullid:
1196 if pp[1] != nullid:
1201 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1197 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1202 ui.write("}\n")
1198 ui.write("}\n")
1203
1199
1204 @command('debugindexstats', [])
1200 @command('debugindexstats', [])
1205 def debugindexstats(ui, repo):
1201 def debugindexstats(ui, repo):
1206 """show stats related to the changelog index"""
1202 """show stats related to the changelog index"""
1207 repo.changelog.shortest(nullid, 1)
1203 repo.changelog.shortest(nullid, 1)
1208 index = repo.changelog.index
1204 index = repo.changelog.index
1209 if not util.safehasattr(index, 'stats'):
1205 if not util.safehasattr(index, 'stats'):
1210 raise error.Abort(_('debugindexstats only works with native code'))
1206 raise error.Abort(_('debugindexstats only works with native code'))
1211 for k, v in sorted(index.stats().items()):
1207 for k, v in sorted(index.stats().items()):
1212 ui.write('%s: %d\n' % (k, v))
1208 ui.write('%s: %d\n' % (k, v))
1213
1209
1214 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1210 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1215 def debuginstall(ui, **opts):
1211 def debuginstall(ui, **opts):
1216 '''test Mercurial installation
1212 '''test Mercurial installation
1217
1213
1218 Returns 0 on success.
1214 Returns 0 on success.
1219 '''
1215 '''
1220 opts = pycompat.byteskwargs(opts)
1216 opts = pycompat.byteskwargs(opts)
1221
1217
1222 problems = 0
1218 problems = 0
1223
1219
1224 fm = ui.formatter('debuginstall', opts)
1220 fm = ui.formatter('debuginstall', opts)
1225 fm.startitem()
1221 fm.startitem()
1226
1222
1227 # encoding
1223 # encoding
1228 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1224 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1229 err = None
1225 err = None
1230 try:
1226 try:
1231 codecs.lookup(pycompat.sysstr(encoding.encoding))
1227 codecs.lookup(pycompat.sysstr(encoding.encoding))
1232 except LookupError as inst:
1228 except LookupError as inst:
1233 err = stringutil.forcebytestr(inst)
1229 err = stringutil.forcebytestr(inst)
1234 problems += 1
1230 problems += 1
1235 fm.condwrite(err, 'encodingerror', _(" %s\n"
1231 fm.condwrite(err, 'encodingerror', _(" %s\n"
1236 " (check that your locale is properly set)\n"), err)
1232 " (check that your locale is properly set)\n"), err)
1237
1233
1238 # Python
1234 # Python
1239 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1235 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1240 pycompat.sysexecutable)
1236 pycompat.sysexecutable)
1241 fm.write('pythonver', _("checking Python version (%s)\n"),
1237 fm.write('pythonver', _("checking Python version (%s)\n"),
1242 ("%d.%d.%d" % sys.version_info[:3]))
1238 ("%d.%d.%d" % sys.version_info[:3]))
1243 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1239 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1244 os.path.dirname(pycompat.fsencode(os.__file__)))
1240 os.path.dirname(pycompat.fsencode(os.__file__)))
1245
1241
1246 security = set(sslutil.supportedprotocols)
1242 security = set(sslutil.supportedprotocols)
1247 if sslutil.hassni:
1243 if sslutil.hassni:
1248 security.add('sni')
1244 security.add('sni')
1249
1245
1250 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1246 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1251 fm.formatlist(sorted(security), name='protocol',
1247 fm.formatlist(sorted(security), name='protocol',
1252 fmt='%s', sep=','))
1248 fmt='%s', sep=','))
1253
1249
1254 # These are warnings, not errors. So don't increment problem count. This
1250 # These are warnings, not errors. So don't increment problem count. This
1255 # may change in the future.
1251 # may change in the future.
1256 if 'tls1.2' not in security:
1252 if 'tls1.2' not in security:
1257 fm.plain(_(' TLS 1.2 not supported by Python install; '
1253 fm.plain(_(' TLS 1.2 not supported by Python install; '
1258 'network connections lack modern security\n'))
1254 'network connections lack modern security\n'))
1259 if 'sni' not in security:
1255 if 'sni' not in security:
1260 fm.plain(_(' SNI not supported by Python install; may have '
1256 fm.plain(_(' SNI not supported by Python install; may have '
1261 'connectivity issues with some servers\n'))
1257 'connectivity issues with some servers\n'))
1262
1258
1263 # TODO print CA cert info
1259 # TODO print CA cert info
1264
1260
1265 # hg version
1261 # hg version
1266 hgver = util.version()
1262 hgver = util.version()
1267 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1263 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1268 hgver.split('+')[0])
1264 hgver.split('+')[0])
1269 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1265 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1270 '+'.join(hgver.split('+')[1:]))
1266 '+'.join(hgver.split('+')[1:]))
1271
1267
1272 # compiled modules
1268 # compiled modules
1273 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1269 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1274 policy.policy)
1270 policy.policy)
1275 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1271 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1276 os.path.dirname(pycompat.fsencode(__file__)))
1272 os.path.dirname(pycompat.fsencode(__file__)))
1277
1273
1278 if policy.policy in ('c', 'allow'):
1274 if policy.policy in ('c', 'allow'):
1279 err = None
1275 err = None
1280 try:
1276 try:
1281 from .cext import (
1277 from .cext import (
1282 base85,
1278 base85,
1283 bdiff,
1279 bdiff,
1284 mpatch,
1280 mpatch,
1285 osutil,
1281 osutil,
1286 )
1282 )
1287 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1283 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1288 except Exception as inst:
1284 except Exception as inst:
1289 err = stringutil.forcebytestr(inst)
1285 err = stringutil.forcebytestr(inst)
1290 problems += 1
1286 problems += 1
1291 fm.condwrite(err, 'extensionserror', " %s\n", err)
1287 fm.condwrite(err, 'extensionserror', " %s\n", err)
1292
1288
1293 compengines = util.compengines._engines.values()
1289 compengines = util.compengines._engines.values()
1294 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1290 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1295 fm.formatlist(sorted(e.name() for e in compengines),
1291 fm.formatlist(sorted(e.name() for e in compengines),
1296 name='compengine', fmt='%s', sep=', '))
1292 name='compengine', fmt='%s', sep=', '))
1297 fm.write('compenginesavail', _('checking available compression engines '
1293 fm.write('compenginesavail', _('checking available compression engines '
1298 '(%s)\n'),
1294 '(%s)\n'),
1299 fm.formatlist(sorted(e.name() for e in compengines
1295 fm.formatlist(sorted(e.name() for e in compengines
1300 if e.available()),
1296 if e.available()),
1301 name='compengine', fmt='%s', sep=', '))
1297 name='compengine', fmt='%s', sep=', '))
1302 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1298 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1303 fm.write('compenginesserver', _('checking available compression engines '
1299 fm.write('compenginesserver', _('checking available compression engines '
1304 'for wire protocol (%s)\n'),
1300 'for wire protocol (%s)\n'),
1305 fm.formatlist([e.name() for e in wirecompengines
1301 fm.formatlist([e.name() for e in wirecompengines
1306 if e.wireprotosupport()],
1302 if e.wireprotosupport()],
1307 name='compengine', fmt='%s', sep=', '))
1303 name='compengine', fmt='%s', sep=', '))
1308 re2 = 'missing'
1304 re2 = 'missing'
1309 if util._re2:
1305 if util._re2:
1310 re2 = 'available'
1306 re2 = 'available'
1311 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1307 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1312 fm.data(re2=bool(util._re2))
1308 fm.data(re2=bool(util._re2))
1313
1309
1314 # templates
1310 # templates
1315 p = templater.templatepaths()
1311 p = templater.templatepaths()
1316 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1312 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1317 fm.condwrite(not p, '', _(" no template directories found\n"))
1313 fm.condwrite(not p, '', _(" no template directories found\n"))
1318 if p:
1314 if p:
1319 m = templater.templatepath("map-cmdline.default")
1315 m = templater.templatepath("map-cmdline.default")
1320 if m:
1316 if m:
1321 # template found, check if it is working
1317 # template found, check if it is working
1322 err = None
1318 err = None
1323 try:
1319 try:
1324 templater.templater.frommapfile(m)
1320 templater.templater.frommapfile(m)
1325 except Exception as inst:
1321 except Exception as inst:
1326 err = stringutil.forcebytestr(inst)
1322 err = stringutil.forcebytestr(inst)
1327 p = None
1323 p = None
1328 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1324 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1329 else:
1325 else:
1330 p = None
1326 p = None
1331 fm.condwrite(p, 'defaulttemplate',
1327 fm.condwrite(p, 'defaulttemplate',
1332 _("checking default template (%s)\n"), m)
1328 _("checking default template (%s)\n"), m)
1333 fm.condwrite(not m, 'defaulttemplatenotfound',
1329 fm.condwrite(not m, 'defaulttemplatenotfound',
1334 _(" template '%s' not found\n"), "default")
1330 _(" template '%s' not found\n"), "default")
1335 if not p:
1331 if not p:
1336 problems += 1
1332 problems += 1
1337 fm.condwrite(not p, '',
1333 fm.condwrite(not p, '',
1338 _(" (templates seem to have been installed incorrectly)\n"))
1334 _(" (templates seem to have been installed incorrectly)\n"))
1339
1335
1340 # editor
1336 # editor
1341 editor = ui.geteditor()
1337 editor = ui.geteditor()
1342 editor = util.expandpath(editor)
1338 editor = util.expandpath(editor)
1343 editorbin = procutil.shellsplit(editor)[0]
1339 editorbin = procutil.shellsplit(editor)[0]
1344 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1340 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1345 cmdpath = procutil.findexe(editorbin)
1341 cmdpath = procutil.findexe(editorbin)
1346 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1342 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1347 _(" No commit editor set and can't find %s in PATH\n"
1343 _(" No commit editor set and can't find %s in PATH\n"
1348 " (specify a commit editor in your configuration"
1344 " (specify a commit editor in your configuration"
1349 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1345 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1350 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1346 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1351 _(" Can't find editor '%s' in PATH\n"
1347 _(" Can't find editor '%s' in PATH\n"
1352 " (specify a commit editor in your configuration"
1348 " (specify a commit editor in your configuration"
1353 " file)\n"), not cmdpath and editorbin)
1349 " file)\n"), not cmdpath and editorbin)
1354 if not cmdpath and editor != 'vi':
1350 if not cmdpath and editor != 'vi':
1355 problems += 1
1351 problems += 1
1356
1352
1357 # check username
1353 # check username
1358 username = None
1354 username = None
1359 err = None
1355 err = None
1360 try:
1356 try:
1361 username = ui.username()
1357 username = ui.username()
1362 except error.Abort as e:
1358 except error.Abort as e:
1363 err = stringutil.forcebytestr(e)
1359 err = stringutil.forcebytestr(e)
1364 problems += 1
1360 problems += 1
1365
1361
1366 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1362 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1367 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1363 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1368 " (specify a username in your configuration file)\n"), err)
1364 " (specify a username in your configuration file)\n"), err)
1369
1365
1370 fm.condwrite(not problems, '',
1366 fm.condwrite(not problems, '',
1371 _("no problems detected\n"))
1367 _("no problems detected\n"))
1372 if not problems:
1368 if not problems:
1373 fm.data(problems=problems)
1369 fm.data(problems=problems)
1374 fm.condwrite(problems, 'problems',
1370 fm.condwrite(problems, 'problems',
1375 _("%d problems detected,"
1371 _("%d problems detected,"
1376 " please check your install!\n"), problems)
1372 " please check your install!\n"), problems)
1377 fm.end()
1373 fm.end()
1378
1374
1379 return problems
1375 return problems
1380
1376
1381 @command('debugknown', [], _('REPO ID...'), norepo=True)
1377 @command('debugknown', [], _('REPO ID...'), norepo=True)
1382 def debugknown(ui, repopath, *ids, **opts):
1378 def debugknown(ui, repopath, *ids, **opts):
1383 """test whether node ids are known to a repo
1379 """test whether node ids are known to a repo
1384
1380
1385 Every ID must be a full-length hex node id string. Returns a list of 0s
1381 Every ID must be a full-length hex node id string. Returns a list of 0s
1386 and 1s indicating unknown/known.
1382 and 1s indicating unknown/known.
1387 """
1383 """
1388 opts = pycompat.byteskwargs(opts)
1384 opts = pycompat.byteskwargs(opts)
1389 repo = hg.peer(ui, opts, repopath)
1385 repo = hg.peer(ui, opts, repopath)
1390 if not repo.capable('known'):
1386 if not repo.capable('known'):
1391 raise error.Abort("known() not supported by target repository")
1387 raise error.Abort("known() not supported by target repository")
1392 flags = repo.known([bin(s) for s in ids])
1388 flags = repo.known([bin(s) for s in ids])
1393 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1389 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1394
1390
1395 @command('debuglabelcomplete', [], _('LABEL...'))
1391 @command('debuglabelcomplete', [], _('LABEL...'))
1396 def debuglabelcomplete(ui, repo, *args):
1392 def debuglabelcomplete(ui, repo, *args):
1397 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1393 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1398 debugnamecomplete(ui, repo, *args)
1394 debugnamecomplete(ui, repo, *args)
1399
1395
1400 @command('debuglocks',
1396 @command('debuglocks',
1401 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1397 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1402 ('W', 'force-wlock', None,
1398 ('W', 'force-wlock', None,
1403 _('free the working state lock (DANGEROUS)')),
1399 _('free the working state lock (DANGEROUS)')),
1404 ('s', 'set-lock', None, _('set the store lock until stopped')),
1400 ('s', 'set-lock', None, _('set the store lock until stopped')),
1405 ('S', 'set-wlock', None,
1401 ('S', 'set-wlock', None,
1406 _('set the working state lock until stopped'))],
1402 _('set the working state lock until stopped'))],
1407 _('[OPTION]...'))
1403 _('[OPTION]...'))
1408 def debuglocks(ui, repo, **opts):
1404 def debuglocks(ui, repo, **opts):
1409 """show or modify state of locks
1405 """show or modify state of locks
1410
1406
1411 By default, this command will show which locks are held. This
1407 By default, this command will show which locks are held. This
1412 includes the user and process holding the lock, the amount of time
1408 includes the user and process holding the lock, the amount of time
1413 the lock has been held, and the machine name where the process is
1409 the lock has been held, and the machine name where the process is
1414 running if it's not local.
1410 running if it's not local.
1415
1411
1416 Locks protect the integrity of Mercurial's data, so should be
1412 Locks protect the integrity of Mercurial's data, so should be
1417 treated with care. System crashes or other interruptions may cause
1413 treated with care. System crashes or other interruptions may cause
1418 locks to not be properly released, though Mercurial will usually
1414 locks to not be properly released, though Mercurial will usually
1419 detect and remove such stale locks automatically.
1415 detect and remove such stale locks automatically.
1420
1416
1421 However, detecting stale locks may not always be possible (for
1417 However, detecting stale locks may not always be possible (for
1422 instance, on a shared filesystem). Removing locks may also be
1418 instance, on a shared filesystem). Removing locks may also be
1423 blocked by filesystem permissions.
1419 blocked by filesystem permissions.
1424
1420
1425 Setting a lock will prevent other commands from changing the data.
1421 Setting a lock will prevent other commands from changing the data.
1426 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1422 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1427 The set locks are removed when the command exits.
1423 The set locks are removed when the command exits.
1428
1424
1429 Returns 0 if no locks are held.
1425 Returns 0 if no locks are held.
1430
1426
1431 """
1427 """
1432
1428
1433 if opts.get(r'force_lock'):
1429 if opts.get(r'force_lock'):
1434 repo.svfs.unlink('lock')
1430 repo.svfs.unlink('lock')
1435 if opts.get(r'force_wlock'):
1431 if opts.get(r'force_wlock'):
1436 repo.vfs.unlink('wlock')
1432 repo.vfs.unlink('wlock')
1437 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1433 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1438 return 0
1434 return 0
1439
1435
1440 locks = []
1436 locks = []
1441 try:
1437 try:
1442 if opts.get(r'set_wlock'):
1438 if opts.get(r'set_wlock'):
1443 try:
1439 try:
1444 locks.append(repo.wlock(False))
1440 locks.append(repo.wlock(False))
1445 except error.LockHeld:
1441 except error.LockHeld:
1446 raise error.Abort(_('wlock is already held'))
1442 raise error.Abort(_('wlock is already held'))
1447 if opts.get(r'set_lock'):
1443 if opts.get(r'set_lock'):
1448 try:
1444 try:
1449 locks.append(repo.lock(False))
1445 locks.append(repo.lock(False))
1450 except error.LockHeld:
1446 except error.LockHeld:
1451 raise error.Abort(_('lock is already held'))
1447 raise error.Abort(_('lock is already held'))
1452 if len(locks):
1448 if len(locks):
1453 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1449 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1454 return 0
1450 return 0
1455 finally:
1451 finally:
1456 release(*locks)
1452 release(*locks)
1457
1453
1458 now = time.time()
1454 now = time.time()
1459 held = 0
1455 held = 0
1460
1456
1461 def report(vfs, name, method):
1457 def report(vfs, name, method):
1462 # this causes stale locks to get reaped for more accurate reporting
1458 # this causes stale locks to get reaped for more accurate reporting
1463 try:
1459 try:
1464 l = method(False)
1460 l = method(False)
1465 except error.LockHeld:
1461 except error.LockHeld:
1466 l = None
1462 l = None
1467
1463
1468 if l:
1464 if l:
1469 l.release()
1465 l.release()
1470 else:
1466 else:
1471 try:
1467 try:
1472 st = vfs.lstat(name)
1468 st = vfs.lstat(name)
1473 age = now - st[stat.ST_MTIME]
1469 age = now - st[stat.ST_MTIME]
1474 user = util.username(st.st_uid)
1470 user = util.username(st.st_uid)
1475 locker = vfs.readlock(name)
1471 locker = vfs.readlock(name)
1476 if ":" in locker:
1472 if ":" in locker:
1477 host, pid = locker.split(':')
1473 host, pid = locker.split(':')
1478 if host == socket.gethostname():
1474 if host == socket.gethostname():
1479 locker = 'user %s, process %s' % (user or b'None', pid)
1475 locker = 'user %s, process %s' % (user or b'None', pid)
1480 else:
1476 else:
1481 locker = ('user %s, process %s, host %s'
1477 locker = ('user %s, process %s, host %s'
1482 % (user or b'None', pid, host))
1478 % (user or b'None', pid, host))
1483 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1479 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1484 return 1
1480 return 1
1485 except OSError as e:
1481 except OSError as e:
1486 if e.errno != errno.ENOENT:
1482 if e.errno != errno.ENOENT:
1487 raise
1483 raise
1488
1484
1489 ui.write(("%-6s free\n") % (name + ":"))
1485 ui.write(("%-6s free\n") % (name + ":"))
1490 return 0
1486 return 0
1491
1487
1492 held += report(repo.svfs, "lock", repo.lock)
1488 held += report(repo.svfs, "lock", repo.lock)
1493 held += report(repo.vfs, "wlock", repo.wlock)
1489 held += report(repo.vfs, "wlock", repo.wlock)
1494
1490
1495 return held
1491 return held
1496
1492
1497 @command('debugmanifestfulltextcache', [
1493 @command('debugmanifestfulltextcache', [
1498 ('', 'clear', False, _('clear the cache')),
1494 ('', 'clear', False, _('clear the cache')),
1499 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1495 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1500 _('NODE'))
1496 _('NODE'))
1501 ], '')
1497 ], '')
1502 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1498 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1503 """show, clear or amend the contents of the manifest fulltext cache"""
1499 """show, clear or amend the contents of the manifest fulltext cache"""
1504
1500
1505 def getcache():
1501 def getcache():
1506 r = repo.manifestlog.getstorage(b'')
1502 r = repo.manifestlog.getstorage(b'')
1507 try:
1503 try:
1508 return r._fulltextcache
1504 return r._fulltextcache
1509 except AttributeError:
1505 except AttributeError:
1510 msg = _("Current revlog implementation doesn't appear to have a "
1506 msg = _("Current revlog implementation doesn't appear to have a "
1511 "manifest fulltext cache\n")
1507 "manifest fulltext cache\n")
1512 raise error.Abort(msg)
1508 raise error.Abort(msg)
1513
1509
1514 if opts.get(r'clear'):
1510 if opts.get(r'clear'):
1515 with repo.wlock():
1511 with repo.wlock():
1516 cache = getcache()
1512 cache = getcache()
1517 cache.clear(clear_persisted_data=True)
1513 cache.clear(clear_persisted_data=True)
1518 return
1514 return
1519
1515
1520 if add:
1516 if add:
1521 with repo.wlock():
1517 with repo.wlock():
1522 m = repo.manifestlog
1518 m = repo.manifestlog
1523 store = m.getstorage(b'')
1519 store = m.getstorage(b'')
1524 for n in add:
1520 for n in add:
1525 try:
1521 try:
1526 manifest = m[store.lookup(n)]
1522 manifest = m[store.lookup(n)]
1527 except error.LookupError as e:
1523 except error.LookupError as e:
1528 raise error.Abort(e, hint="Check your manifest node id")
1524 raise error.Abort(e, hint="Check your manifest node id")
1529 manifest.read() # stores revisision in cache too
1525 manifest.read() # stores revisision in cache too
1530 return
1526 return
1531
1527
1532 cache = getcache()
1528 cache = getcache()
1533 if not len(cache):
1529 if not len(cache):
1534 ui.write(_('cache empty\n'))
1530 ui.write(_('cache empty\n'))
1535 else:
1531 else:
1536 ui.write(
1532 ui.write(
1537 _('cache contains %d manifest entries, in order of most to '
1533 _('cache contains %d manifest entries, in order of most to '
1538 'least recent:\n') % (len(cache),))
1534 'least recent:\n') % (len(cache),))
1539 totalsize = 0
1535 totalsize = 0
1540 for nodeid in cache:
1536 for nodeid in cache:
1541 # Use cache.get to not update the LRU order
1537 # Use cache.get to not update the LRU order
1542 data = cache.peek(nodeid)
1538 data = cache.peek(nodeid)
1543 size = len(data)
1539 size = len(data)
1544 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1540 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1545 ui.write(_('id: %s, size %s\n') % (
1541 ui.write(_('id: %s, size %s\n') % (
1546 hex(nodeid), util.bytecount(size)))
1542 hex(nodeid), util.bytecount(size)))
1547 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1543 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1548 ui.write(
1544 ui.write(
1549 _('total cache data size %s, on-disk %s\n') % (
1545 _('total cache data size %s, on-disk %s\n') % (
1550 util.bytecount(totalsize), util.bytecount(ondisk))
1546 util.bytecount(totalsize), util.bytecount(ondisk))
1551 )
1547 )
1552
1548
1553 @command('debugmergestate', [], '')
1549 @command('debugmergestate', [], '')
1554 def debugmergestate(ui, repo, *args):
1550 def debugmergestate(ui, repo, *args):
1555 """print merge state
1551 """print merge state
1556
1552
1557 Use --verbose to print out information about whether v1 or v2 merge state
1553 Use --verbose to print out information about whether v1 or v2 merge state
1558 was chosen."""
1554 was chosen."""
1559 def _hashornull(h):
1555 def _hashornull(h):
1560 if h == nullhex:
1556 if h == nullhex:
1561 return 'null'
1557 return 'null'
1562 else:
1558 else:
1563 return h
1559 return h
1564
1560
1565 def printrecords(version):
1561 def printrecords(version):
1566 ui.write(('* version %d records\n') % version)
1562 ui.write(('* version %d records\n') % version)
1567 if version == 1:
1563 if version == 1:
1568 records = v1records
1564 records = v1records
1569 else:
1565 else:
1570 records = v2records
1566 records = v2records
1571
1567
1572 for rtype, record in records:
1568 for rtype, record in records:
1573 # pretty print some record types
1569 # pretty print some record types
1574 if rtype == 'L':
1570 if rtype == 'L':
1575 ui.write(('local: %s\n') % record)
1571 ui.write(('local: %s\n') % record)
1576 elif rtype == 'O':
1572 elif rtype == 'O':
1577 ui.write(('other: %s\n') % record)
1573 ui.write(('other: %s\n') % record)
1578 elif rtype == 'm':
1574 elif rtype == 'm':
1579 driver, mdstate = record.split('\0', 1)
1575 driver, mdstate = record.split('\0', 1)
1580 ui.write(('merge driver: %s (state "%s")\n')
1576 ui.write(('merge driver: %s (state "%s")\n')
1581 % (driver, mdstate))
1577 % (driver, mdstate))
1582 elif rtype in 'FDC':
1578 elif rtype in 'FDC':
1583 r = record.split('\0')
1579 r = record.split('\0')
1584 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1580 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1585 if version == 1:
1581 if version == 1:
1586 onode = 'not stored in v1 format'
1582 onode = 'not stored in v1 format'
1587 flags = r[7]
1583 flags = r[7]
1588 else:
1584 else:
1589 onode, flags = r[7:9]
1585 onode, flags = r[7:9]
1590 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1586 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1591 % (f, rtype, state, _hashornull(hash)))
1587 % (f, rtype, state, _hashornull(hash)))
1592 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1588 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1593 ui.write((' ancestor path: %s (node %s)\n')
1589 ui.write((' ancestor path: %s (node %s)\n')
1594 % (afile, _hashornull(anode)))
1590 % (afile, _hashornull(anode)))
1595 ui.write((' other path: %s (node %s)\n')
1591 ui.write((' other path: %s (node %s)\n')
1596 % (ofile, _hashornull(onode)))
1592 % (ofile, _hashornull(onode)))
1597 elif rtype == 'f':
1593 elif rtype == 'f':
1598 filename, rawextras = record.split('\0', 1)
1594 filename, rawextras = record.split('\0', 1)
1599 extras = rawextras.split('\0')
1595 extras = rawextras.split('\0')
1600 i = 0
1596 i = 0
1601 extrastrings = []
1597 extrastrings = []
1602 while i < len(extras):
1598 while i < len(extras):
1603 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1599 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1604 i += 2
1600 i += 2
1605
1601
1606 ui.write(('file extras: %s (%s)\n')
1602 ui.write(('file extras: %s (%s)\n')
1607 % (filename, ', '.join(extrastrings)))
1603 % (filename, ', '.join(extrastrings)))
1608 elif rtype == 'l':
1604 elif rtype == 'l':
1609 labels = record.split('\0', 2)
1605 labels = record.split('\0', 2)
1610 labels = [l for l in labels if len(l) > 0]
1606 labels = [l for l in labels if len(l) > 0]
1611 ui.write(('labels:\n'))
1607 ui.write(('labels:\n'))
1612 ui.write((' local: %s\n' % labels[0]))
1608 ui.write((' local: %s\n' % labels[0]))
1613 ui.write((' other: %s\n' % labels[1]))
1609 ui.write((' other: %s\n' % labels[1]))
1614 if len(labels) > 2:
1610 if len(labels) > 2:
1615 ui.write((' base: %s\n' % labels[2]))
1611 ui.write((' base: %s\n' % labels[2]))
1616 else:
1612 else:
1617 ui.write(('unrecognized entry: %s\t%s\n')
1613 ui.write(('unrecognized entry: %s\t%s\n')
1618 % (rtype, record.replace('\0', '\t')))
1614 % (rtype, record.replace('\0', '\t')))
1619
1615
1620 # Avoid mergestate.read() since it may raise an exception for unsupported
1616 # Avoid mergestate.read() since it may raise an exception for unsupported
1621 # merge state records. We shouldn't be doing this, but this is OK since this
1617 # merge state records. We shouldn't be doing this, but this is OK since this
1622 # command is pretty low-level.
1618 # command is pretty low-level.
1623 ms = mergemod.mergestate(repo)
1619 ms = mergemod.mergestate(repo)
1624
1620
1625 # sort so that reasonable information is on top
1621 # sort so that reasonable information is on top
1626 v1records = ms._readrecordsv1()
1622 v1records = ms._readrecordsv1()
1627 v2records = ms._readrecordsv2()
1623 v2records = ms._readrecordsv2()
1628 order = 'LOml'
1624 order = 'LOml'
1629 def key(r):
1625 def key(r):
1630 idx = order.find(r[0])
1626 idx = order.find(r[0])
1631 if idx == -1:
1627 if idx == -1:
1632 return (1, r[1])
1628 return (1, r[1])
1633 else:
1629 else:
1634 return (0, idx)
1630 return (0, idx)
1635 v1records.sort(key=key)
1631 v1records.sort(key=key)
1636 v2records.sort(key=key)
1632 v2records.sort(key=key)
1637
1633
1638 if not v1records and not v2records:
1634 if not v1records and not v2records:
1639 ui.write(('no merge state found\n'))
1635 ui.write(('no merge state found\n'))
1640 elif not v2records:
1636 elif not v2records:
1641 ui.note(('no version 2 merge state\n'))
1637 ui.note(('no version 2 merge state\n'))
1642 printrecords(1)
1638 printrecords(1)
1643 elif ms._v1v2match(v1records, v2records):
1639 elif ms._v1v2match(v1records, v2records):
1644 ui.note(('v1 and v2 states match: using v2\n'))
1640 ui.note(('v1 and v2 states match: using v2\n'))
1645 printrecords(2)
1641 printrecords(2)
1646 else:
1642 else:
1647 ui.note(('v1 and v2 states mismatch: using v1\n'))
1643 ui.note(('v1 and v2 states mismatch: using v1\n'))
1648 printrecords(1)
1644 printrecords(1)
1649 if ui.verbose:
1645 if ui.verbose:
1650 printrecords(2)
1646 printrecords(2)
1651
1647
1652 @command('debugnamecomplete', [], _('NAME...'))
1648 @command('debugnamecomplete', [], _('NAME...'))
1653 def debugnamecomplete(ui, repo, *args):
1649 def debugnamecomplete(ui, repo, *args):
1654 '''complete "names" - tags, open branch names, bookmark names'''
1650 '''complete "names" - tags, open branch names, bookmark names'''
1655
1651
1656 names = set()
1652 names = set()
1657 # since we previously only listed open branches, we will handle that
1653 # since we previously only listed open branches, we will handle that
1658 # specially (after this for loop)
1654 # specially (after this for loop)
1659 for name, ns in repo.names.iteritems():
1655 for name, ns in repo.names.iteritems():
1660 if name != 'branches':
1656 if name != 'branches':
1661 names.update(ns.listnames(repo))
1657 names.update(ns.listnames(repo))
1662 names.update(tag for (tag, heads, tip, closed)
1658 names.update(tag for (tag, heads, tip, closed)
1663 in repo.branchmap().iterbranches() if not closed)
1659 in repo.branchmap().iterbranches() if not closed)
1664 completions = set()
1660 completions = set()
1665 if not args:
1661 if not args:
1666 args = ['']
1662 args = ['']
1667 for a in args:
1663 for a in args:
1668 completions.update(n for n in names if n.startswith(a))
1664 completions.update(n for n in names if n.startswith(a))
1669 ui.write('\n'.join(sorted(completions)))
1665 ui.write('\n'.join(sorted(completions)))
1670 ui.write('\n')
1666 ui.write('\n')
1671
1667
1672 @command('debugobsolete',
1668 @command('debugobsolete',
1673 [('', 'flags', 0, _('markers flag')),
1669 [('', 'flags', 0, _('markers flag')),
1674 ('', 'record-parents', False,
1670 ('', 'record-parents', False,
1675 _('record parent information for the precursor')),
1671 _('record parent information for the precursor')),
1676 ('r', 'rev', [], _('display markers relevant to REV')),
1672 ('r', 'rev', [], _('display markers relevant to REV')),
1677 ('', 'exclusive', False, _('restrict display to markers only '
1673 ('', 'exclusive', False, _('restrict display to markers only '
1678 'relevant to REV')),
1674 'relevant to REV')),
1679 ('', 'index', False, _('display index of the marker')),
1675 ('', 'index', False, _('display index of the marker')),
1680 ('', 'delete', [], _('delete markers specified by indices')),
1676 ('', 'delete', [], _('delete markers specified by indices')),
1681 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1677 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1682 _('[OBSOLETED [REPLACEMENT ...]]'))
1678 _('[OBSOLETED [REPLACEMENT ...]]'))
1683 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1679 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1684 """create arbitrary obsolete marker
1680 """create arbitrary obsolete marker
1685
1681
1686 With no arguments, displays the list of obsolescence markers."""
1682 With no arguments, displays the list of obsolescence markers."""
1687
1683
1688 opts = pycompat.byteskwargs(opts)
1684 opts = pycompat.byteskwargs(opts)
1689
1685
1690 def parsenodeid(s):
1686 def parsenodeid(s):
1691 try:
1687 try:
1692 # We do not use revsingle/revrange functions here to accept
1688 # We do not use revsingle/revrange functions here to accept
1693 # arbitrary node identifiers, possibly not present in the
1689 # arbitrary node identifiers, possibly not present in the
1694 # local repository.
1690 # local repository.
1695 n = bin(s)
1691 n = bin(s)
1696 if len(n) != len(nullid):
1692 if len(n) != len(nullid):
1697 raise TypeError()
1693 raise TypeError()
1698 return n
1694 return n
1699 except TypeError:
1695 except TypeError:
1700 raise error.Abort('changeset references must be full hexadecimal '
1696 raise error.Abort('changeset references must be full hexadecimal '
1701 'node identifiers')
1697 'node identifiers')
1702
1698
1703 if opts.get('delete'):
1699 if opts.get('delete'):
1704 indices = []
1700 indices = []
1705 for v in opts.get('delete'):
1701 for v in opts.get('delete'):
1706 try:
1702 try:
1707 indices.append(int(v))
1703 indices.append(int(v))
1708 except ValueError:
1704 except ValueError:
1709 raise error.Abort(_('invalid index value: %r') % v,
1705 raise error.Abort(_('invalid index value: %r') % v,
1710 hint=_('use integers for indices'))
1706 hint=_('use integers for indices'))
1711
1707
1712 if repo.currenttransaction():
1708 if repo.currenttransaction():
1713 raise error.Abort(_('cannot delete obsmarkers in the middle '
1709 raise error.Abort(_('cannot delete obsmarkers in the middle '
1714 'of transaction.'))
1710 'of transaction.'))
1715
1711
1716 with repo.lock():
1712 with repo.lock():
1717 n = repair.deleteobsmarkers(repo.obsstore, indices)
1713 n = repair.deleteobsmarkers(repo.obsstore, indices)
1718 ui.write(_('deleted %i obsolescence markers\n') % n)
1714 ui.write(_('deleted %i obsolescence markers\n') % n)
1719
1715
1720 return
1716 return
1721
1717
1722 if precursor is not None:
1718 if precursor is not None:
1723 if opts['rev']:
1719 if opts['rev']:
1724 raise error.Abort('cannot select revision when creating marker')
1720 raise error.Abort('cannot select revision when creating marker')
1725 metadata = {}
1721 metadata = {}
1726 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1722 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1727 succs = tuple(parsenodeid(succ) for succ in successors)
1723 succs = tuple(parsenodeid(succ) for succ in successors)
1728 l = repo.lock()
1724 l = repo.lock()
1729 try:
1725 try:
1730 tr = repo.transaction('debugobsolete')
1726 tr = repo.transaction('debugobsolete')
1731 try:
1727 try:
1732 date = opts.get('date')
1728 date = opts.get('date')
1733 if date:
1729 if date:
1734 date = dateutil.parsedate(date)
1730 date = dateutil.parsedate(date)
1735 else:
1731 else:
1736 date = None
1732 date = None
1737 prec = parsenodeid(precursor)
1733 prec = parsenodeid(precursor)
1738 parents = None
1734 parents = None
1739 if opts['record_parents']:
1735 if opts['record_parents']:
1740 if prec not in repo.unfiltered():
1736 if prec not in repo.unfiltered():
1741 raise error.Abort('cannot used --record-parents on '
1737 raise error.Abort('cannot used --record-parents on '
1742 'unknown changesets')
1738 'unknown changesets')
1743 parents = repo.unfiltered()[prec].parents()
1739 parents = repo.unfiltered()[prec].parents()
1744 parents = tuple(p.node() for p in parents)
1740 parents = tuple(p.node() for p in parents)
1745 repo.obsstore.create(tr, prec, succs, opts['flags'],
1741 repo.obsstore.create(tr, prec, succs, opts['flags'],
1746 parents=parents, date=date,
1742 parents=parents, date=date,
1747 metadata=metadata, ui=ui)
1743 metadata=metadata, ui=ui)
1748 tr.close()
1744 tr.close()
1749 except ValueError as exc:
1745 except ValueError as exc:
1750 raise error.Abort(_('bad obsmarker input: %s') %
1746 raise error.Abort(_('bad obsmarker input: %s') %
1751 pycompat.bytestr(exc))
1747 pycompat.bytestr(exc))
1752 finally:
1748 finally:
1753 tr.release()
1749 tr.release()
1754 finally:
1750 finally:
1755 l.release()
1751 l.release()
1756 else:
1752 else:
1757 if opts['rev']:
1753 if opts['rev']:
1758 revs = scmutil.revrange(repo, opts['rev'])
1754 revs = scmutil.revrange(repo, opts['rev'])
1759 nodes = [repo[r].node() for r in revs]
1755 nodes = [repo[r].node() for r in revs]
1760 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1756 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1761 exclusive=opts['exclusive']))
1757 exclusive=opts['exclusive']))
1762 markers.sort(key=lambda x: x._data)
1758 markers.sort(key=lambda x: x._data)
1763 else:
1759 else:
1764 markers = obsutil.getmarkers(repo)
1760 markers = obsutil.getmarkers(repo)
1765
1761
1766 markerstoiter = markers
1762 markerstoiter = markers
1767 isrelevant = lambda m: True
1763 isrelevant = lambda m: True
1768 if opts.get('rev') and opts.get('index'):
1764 if opts.get('rev') and opts.get('index'):
1769 markerstoiter = obsutil.getmarkers(repo)
1765 markerstoiter = obsutil.getmarkers(repo)
1770 markerset = set(markers)
1766 markerset = set(markers)
1771 isrelevant = lambda m: m in markerset
1767 isrelevant = lambda m: m in markerset
1772
1768
1773 fm = ui.formatter('debugobsolete', opts)
1769 fm = ui.formatter('debugobsolete', opts)
1774 for i, m in enumerate(markerstoiter):
1770 for i, m in enumerate(markerstoiter):
1775 if not isrelevant(m):
1771 if not isrelevant(m):
1776 # marker can be irrelevant when we're iterating over a set
1772 # marker can be irrelevant when we're iterating over a set
1777 # of markers (markerstoiter) which is bigger than the set
1773 # of markers (markerstoiter) which is bigger than the set
1778 # of markers we want to display (markers)
1774 # of markers we want to display (markers)
1779 # this can happen if both --index and --rev options are
1775 # this can happen if both --index and --rev options are
1780 # provided and thus we need to iterate over all of the markers
1776 # provided and thus we need to iterate over all of the markers
1781 # to get the correct indices, but only display the ones that
1777 # to get the correct indices, but only display the ones that
1782 # are relevant to --rev value
1778 # are relevant to --rev value
1783 continue
1779 continue
1784 fm.startitem()
1780 fm.startitem()
1785 ind = i if opts.get('index') else None
1781 ind = i if opts.get('index') else None
1786 cmdutil.showmarker(fm, m, index=ind)
1782 cmdutil.showmarker(fm, m, index=ind)
1787 fm.end()
1783 fm.end()
1788
1784
1789 @command('debugp1copies',
1785 @command('debugp1copies',
1790 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1786 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1791 _('[-r REV]'))
1787 _('[-r REV]'))
1792 def debugp1copies(ui, repo, **opts):
1788 def debugp1copies(ui, repo, **opts):
1793 """dump copy information compared to p1"""
1789 """dump copy information compared to p1"""
1794
1790
1795 opts = pycompat.byteskwargs(opts)
1791 opts = pycompat.byteskwargs(opts)
1796 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1792 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1797 for dst, src in ctx.p1copies().items():
1793 for dst, src in ctx.p1copies().items():
1798 ui.write('%s -> %s\n' % (src, dst))
1794 ui.write('%s -> %s\n' % (src, dst))
1799
1795
1800 @command('debugp2copies',
1796 @command('debugp2copies',
1801 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1797 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1802 _('[-r REV]'))
1798 _('[-r REV]'))
1803 def debugp1copies(ui, repo, **opts):
1799 def debugp1copies(ui, repo, **opts):
1804 """dump copy information compared to p2"""
1800 """dump copy information compared to p2"""
1805
1801
1806 opts = pycompat.byteskwargs(opts)
1802 opts = pycompat.byteskwargs(opts)
1807 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1803 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1808 for dst, src in ctx.p2copies().items():
1804 for dst, src in ctx.p2copies().items():
1809 ui.write('%s -> %s\n' % (src, dst))
1805 ui.write('%s -> %s\n' % (src, dst))
1810
1806
1811 @command('debugpathcomplete',
1807 @command('debugpathcomplete',
1812 [('f', 'full', None, _('complete an entire path')),
1808 [('f', 'full', None, _('complete an entire path')),
1813 ('n', 'normal', None, _('show only normal files')),
1809 ('n', 'normal', None, _('show only normal files')),
1814 ('a', 'added', None, _('show only added files')),
1810 ('a', 'added', None, _('show only added files')),
1815 ('r', 'removed', None, _('show only removed files'))],
1811 ('r', 'removed', None, _('show only removed files'))],
1816 _('FILESPEC...'))
1812 _('FILESPEC...'))
1817 def debugpathcomplete(ui, repo, *specs, **opts):
1813 def debugpathcomplete(ui, repo, *specs, **opts):
1818 '''complete part or all of a tracked path
1814 '''complete part or all of a tracked path
1819
1815
1820 This command supports shells that offer path name completion. It
1816 This command supports shells that offer path name completion. It
1821 currently completes only files already known to the dirstate.
1817 currently completes only files already known to the dirstate.
1822
1818
1823 Completion extends only to the next path segment unless
1819 Completion extends only to the next path segment unless
1824 --full is specified, in which case entire paths are used.'''
1820 --full is specified, in which case entire paths are used.'''
1825
1821
1826 def complete(path, acceptable):
1822 def complete(path, acceptable):
1827 dirstate = repo.dirstate
1823 dirstate = repo.dirstate
1828 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1824 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1829 rootdir = repo.root + pycompat.ossep
1825 rootdir = repo.root + pycompat.ossep
1830 if spec != repo.root and not spec.startswith(rootdir):
1826 if spec != repo.root and not spec.startswith(rootdir):
1831 return [], []
1827 return [], []
1832 if os.path.isdir(spec):
1828 if os.path.isdir(spec):
1833 spec += '/'
1829 spec += '/'
1834 spec = spec[len(rootdir):]
1830 spec = spec[len(rootdir):]
1835 fixpaths = pycompat.ossep != '/'
1831 fixpaths = pycompat.ossep != '/'
1836 if fixpaths:
1832 if fixpaths:
1837 spec = spec.replace(pycompat.ossep, '/')
1833 spec = spec.replace(pycompat.ossep, '/')
1838 speclen = len(spec)
1834 speclen = len(spec)
1839 fullpaths = opts[r'full']
1835 fullpaths = opts[r'full']
1840 files, dirs = set(), set()
1836 files, dirs = set(), set()
1841 adddir, addfile = dirs.add, files.add
1837 adddir, addfile = dirs.add, files.add
1842 for f, st in dirstate.iteritems():
1838 for f, st in dirstate.iteritems():
1843 if f.startswith(spec) and st[0] in acceptable:
1839 if f.startswith(spec) and st[0] in acceptable:
1844 if fixpaths:
1840 if fixpaths:
1845 f = f.replace('/', pycompat.ossep)
1841 f = f.replace('/', pycompat.ossep)
1846 if fullpaths:
1842 if fullpaths:
1847 addfile(f)
1843 addfile(f)
1848 continue
1844 continue
1849 s = f.find(pycompat.ossep, speclen)
1845 s = f.find(pycompat.ossep, speclen)
1850 if s >= 0:
1846 if s >= 0:
1851 adddir(f[:s])
1847 adddir(f[:s])
1852 else:
1848 else:
1853 addfile(f)
1849 addfile(f)
1854 return files, dirs
1850 return files, dirs
1855
1851
1856 acceptable = ''
1852 acceptable = ''
1857 if opts[r'normal']:
1853 if opts[r'normal']:
1858 acceptable += 'nm'
1854 acceptable += 'nm'
1859 if opts[r'added']:
1855 if opts[r'added']:
1860 acceptable += 'a'
1856 acceptable += 'a'
1861 if opts[r'removed']:
1857 if opts[r'removed']:
1862 acceptable += 'r'
1858 acceptable += 'r'
1863 cwd = repo.getcwd()
1859 cwd = repo.getcwd()
1864 if not specs:
1860 if not specs:
1865 specs = ['.']
1861 specs = ['.']
1866
1862
1867 files, dirs = set(), set()
1863 files, dirs = set(), set()
1868 for spec in specs:
1864 for spec in specs:
1869 f, d = complete(spec, acceptable or 'nmar')
1865 f, d = complete(spec, acceptable or 'nmar')
1870 files.update(f)
1866 files.update(f)
1871 dirs.update(d)
1867 dirs.update(d)
1872 files.update(dirs)
1868 files.update(dirs)
1873 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1869 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1874 ui.write('\n')
1870 ui.write('\n')
1875
1871
1876 @command('debugpathcopies',
1872 @command('debugpathcopies',
1877 cmdutil.walkopts,
1873 cmdutil.walkopts,
1878 'hg debugpathcopies REV1 REV2 [FILE]',
1874 'hg debugpathcopies REV1 REV2 [FILE]',
1879 inferrepo=True)
1875 inferrepo=True)
1880 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1876 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1881 """show copies between two revisions"""
1877 """show copies between two revisions"""
1882 ctx1 = scmutil.revsingle(repo, rev1)
1878 ctx1 = scmutil.revsingle(repo, rev1)
1883 ctx2 = scmutil.revsingle(repo, rev2)
1879 ctx2 = scmutil.revsingle(repo, rev2)
1884 m = scmutil.match(ctx1, pats, opts)
1880 m = scmutil.match(ctx1, pats, opts)
1885 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1881 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1886 ui.write('%s -> %s\n' % (src, dst))
1882 ui.write('%s -> %s\n' % (src, dst))
1887
1883
1888 @command('debugpeer', [], _('PATH'), norepo=True)
1884 @command('debugpeer', [], _('PATH'), norepo=True)
1889 def debugpeer(ui, path):
1885 def debugpeer(ui, path):
1890 """establish a connection to a peer repository"""
1886 """establish a connection to a peer repository"""
1891 # Always enable peer request logging. Requires --debug to display
1887 # Always enable peer request logging. Requires --debug to display
1892 # though.
1888 # though.
1893 overrides = {
1889 overrides = {
1894 ('devel', 'debug.peer-request'): True,
1890 ('devel', 'debug.peer-request'): True,
1895 }
1891 }
1896
1892
1897 with ui.configoverride(overrides):
1893 with ui.configoverride(overrides):
1898 peer = hg.peer(ui, {}, path)
1894 peer = hg.peer(ui, {}, path)
1899
1895
1900 local = peer.local() is not None
1896 local = peer.local() is not None
1901 canpush = peer.canpush()
1897 canpush = peer.canpush()
1902
1898
1903 ui.write(_('url: %s\n') % peer.url())
1899 ui.write(_('url: %s\n') % peer.url())
1904 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1900 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1905 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1901 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1906
1902
1907 @command('debugpickmergetool',
1903 @command('debugpickmergetool',
1908 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1904 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1909 ('', 'changedelete', None, _('emulate merging change and delete')),
1905 ('', 'changedelete', None, _('emulate merging change and delete')),
1910 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1906 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1911 _('[PATTERN]...'),
1907 _('[PATTERN]...'),
1912 inferrepo=True)
1908 inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    # --tool is propagated to the picker via a temporary config override
    # of ui.forcemerge, exactly like a real merge would do.
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # Report the other tool-selection inputs up front (verbose only).
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # _picktool() may emit warnings while matching
                # merge-patterns; swallow that chatter unless --debug.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        # Five-argument form: conditional compare-and-set of one key.
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        # Shell exit code: 0 when the pushkey call reported success
        # (r is true), non-zero otherwise.
        return not r
    else:
        # Two-argument form: dump every key/value in the namespace.
        # iteritems() is the py2-compatible spelling used in this codebase.
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvecs of two revisions

    Prints both pvecs, their depths, and the delta/hamming/distance
    metrics together with the relation symbol (=, >, <, or |).
    """
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): if none of the four comparisons holds, `rel` stays
    # unbound and the final write raises NameError — presumably the pvec
    # operators are exhaustive (equal/ancestor/descendant/unrelated);
    # confirm against the pvec module.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "no restriction" — presumably rebuild() then refreshes
        # every file; confirm against dirstate.rebuild.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files only in the manifest, plus files only in the dirstate
            # that are not marked added ('a' — adds are left alone per the
            # docstring above).
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all the work is delegated to the repair module.
    repair.rebuildfncache(ui, repo)
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, pats, opts)
    # Walk every matched file in the chosen context and report whether
    # its filelog records a rename source for this file revision.
    for abspath in ctx.walk(matcher):
        filectx = ctx[abspath]
        renamed = filectx.filelog().renamed(filectx.filenode())
        relpath = repo.pathto(abspath)
        if not renamed:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            srcpath, srcnode = renamed
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, srcpath, hex(srcnode)))
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # --dump mode: one raw line per revision, then exit.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        # Running set of current head revisions: parents are evicted as
        # each rev is added, so len(heads) is the head count so far.
        heads = set()

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # cumulative raw size / cumulative stored size so far
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog version word: low 16 bits are the format number,
    # the rest are feature flags.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    ### tracks merge vs single parent
    nummerges = 0

    ### tracks ways the "delta" are built
    # nodelta
    numempty = 0
    numemptytext = 0
    numemptydelta = 0
    # full file content
    numfull = 0
    # intermediate snapshot against a prior snapshot
    numsemi = 0
    # snapshot count per depth
    numsnapdepth = collections.defaultdict(lambda: 0)
    # delta against previous revision
    numprev = 0
    # delta against first or second parent (not prev)
    nump1 = 0
    nump2 = 0
    # delta against neither prev nor parents
    numother = 0
    # delta against prev that are also first or second parent
    # (details of `numprev`)
    nump1prev = 0
    nump2prev = 0

    # data about delta chain of each revs
    chainlengths = []
    chainbases = []
    chainspans = []

    # data about each revision; each accumulator is [min, max, total]
    # (min starts as None until the first sample — see addsize()).
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    semisize = [None, 0, 0]
    # snapshot count per depth
    snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # No delta parent: this is a full snapshot (depth 0).
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            if size == 0:
                numempty += 1
                numemptytext += 1
            else:
                numfull += 1
                numsnapdepth[0] += 1
                addsize(size, fullsize)
                addsize(size, snapsizedepth[0])
        else:
            # Delta-based revision: extend the chain of its delta parent.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            if size == 0:
                numempty += 1
                numemptydelta += 1
            elif r.issnapshot(rev):
                addsize(size, semisize)
                numsemi += 1
                depth = r.snapshotdepth(rev)
                numsnapdepth[depth] += 1
                addsize(size, snapsizedepth[depth])
            else:
                # Plain delta: classify by what it deltas against.
                addsize(size, deltasize)
                if delta == rev - 1:
                    numprev += 1
                    if delta == p1:
                        nump1prev += 1
                    elif delta == p2:
                        nump2prev += 1
                elif delta == p1:
                    nump1 += 1
                elif delta == p2:
                    nump2 += 1
                elif delta != nullrev:
                    numother += 1

        # Obtain data on the raw chunks in the revlog.
        if util.safehasattr(r, '_getsegmentforrevs'):
            segment = r._getsegmentforrevs(rev, rev)[1]
        else:
            segment = r._revlog._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies its compression.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, semisize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull - numempty - numsemi
    numoprev = numprev - nump1prev - nump2prev
    # Turn the running totals into averages in place (slot 2 of each
    # [min, max, total] accumulator becomes the mean).
    # NOTE(review): assumes numrevs > 0 and numfull > 0; an empty revlog
    # would raise ZeroDivisionError here — TODO confirm intended behavior.
    totalrawsize = datasize[2]
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    semitotal = semisize[2]
    snaptotal = {}
    if numsemi > 0:
        semisize[2] /= numsemi
    for depth in snapsizedepth:
        snaptotal[depth] = snapsizedepth[depth][2]
        snapsizedepth[depth][2] /= numsnapdepth[depth]

    deltatotal = deltasize[2]
    if numdeltas > 0:
        deltasize[2] /= numdeltas
    totalsize = fulltotal + semitotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Width-adapted integer format for the largest value printed.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Width-adapted "count (percent)" format.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
    ui.write((' text : ')
             + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
    ui.write((' delta : ')
             + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
    ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(numsnapdepth[depth], numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' snapshot : ')
             + fmt % pcfmt(fulltotal + semitotal, totalsize))
    for depth in sorted(numsnapdepth):
        ui.write((' lvl-%-3d : ' % depth)
                 + fmt % pcfmt(snaptotal[depth], totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk-type byte.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
                 % tuple(semisize))
        for depth in sorted(snapsizedepth):
            if depth == 0:
                continue
            ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
                     % ((depth,) + tuple(snapsizedepth[depth])))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
        ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                           numdeltas))
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # --debug prints full 40-char hashes; otherwise use the short form
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # measure the first node to size the hash columns consistently
        idlen = len(shortfn(r.node(i)))
        break

    # print the column header matching the selected format/verbosity
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # entries with unreadable parents are still printed,
                # with null parents as a placeholder
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            # format 1 reports parents as revision numbers, not hashes
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2460
2456
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # the parsing pipeline: each stage transforms the tree of the previous one
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # which stage trees to print: always vs. only when they changed
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized trees and compare
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # results differ: print a unified-diff style comparison of the
        # two revision lists, colored with the standard diff labels
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2563
2559
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are two ways of naming the same log sink
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    # hand the process's stdio over to the SSH wire-protocol server;
    # this call blocks until the client disconnects
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2600
2596
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # resolve both revisions up front; a missing second revision
    # defaults to the null changeset
    p1 = scmutil.revsingle(repo, rev1).node()
    p2 = scmutil.revsingle(repo, rev2, 'null').node()

    # only the working-copy parents are rewritten, under the wlock
    with repo.wlock():
        repo.setparents(p1, p2)
2618
2614
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        # fall back to the repository's configured default path
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # only schemes with a well-known port can be probed
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # certificate verification is intentionally disabled here: the point is
    # to fetch the peer's certificate, not to validate the connection
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # first check without building, then ask Windows to fetch the
        # missing intermediates if the chain turned out incomplete
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2679
2675
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # resolve the requested revision (working directory when omitted)
    ctx = scmutil.revsingle(repo, rev, None)
    # print every subrepository entry of the changeset, sorted by path
    for path, state in sorted(ctx.substate.items()):
        source = state[0]
        revision = state[1]
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % source)
        ui.write((' revision %s\n') % revision)
2690
2686
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # rendering helpers: full context repr for the header line,
    # short node hashes for the successors
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # one indented line per successors set, nodes separated
                # by spaces
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2743
2739
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        # -r requires a repository even though the command itself does not
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # parse -D KEY=VALUE definitions into extra template properties;
    # 'ui' is reserved and may not be overridden
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # print the parsed tree, and the alias-expanded tree if it differs
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # generic template: render once with the defined properties
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # log template: render once per requested changeset
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2800
2796
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # fix: label was misspelled 'respose'; spell it 'response' to match
    # the output of the sibling debuguiprompt command
    ui.write(('response: %s\n') % r)
2808
2804
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # read a plain (echoed) answer from the user and echo it back
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2816
2812
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # take the working-copy lock first, then the store lock, so every
    # cache can be rebuilt safely
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2822
2818
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # all the real work (analysis and the optional upgrade run) lives in
    # the upgrade module; this command is a thin CLI entry point
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
                               backup=backup)
2844 backup=backup)
2849
2845
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
    inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(matcher), '\n')
    matched = list(repo[None].walk(matcher))
    if not matched:
        return
    # optionally rewrite OS-specific path separators to '/' for display
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # size the columns to the widest repo-path and relative-path entries
    widest_repo = max(len(fn) for fn in matched)
    widest_rel = max(len(repo.pathto(fn)) for fn in matched)
    fmt = 'f %%-%ds %%-%ds %%s' % (widest_repo, widest_rel)
    for fn in matched:
        flag = 'exact' if matcher.exact(fn) else ''
        line = fmt % (fn, display(repo.pathto(fn)), flag)
        ui.write("%s\n" % line.rstrip())
2870
2866
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    # One entry per instability; each carries the reason and, for content
    # divergence, the set of divergent nodes.
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2881
2877
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Exercise argument marshalling over the wire protocol against REPO.
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # The connection options are consumed by hg.peer() above; strip them so
    # only the debug arguments remain.
    for remoteopt in cmdutil.remoteopts:
        del opts[remoteopt[1]]
    # Forward only the options that were actually set.
    args = pycompat.strkwargs(
        {k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2905
2901
def _parsewirelangblocks(fh):
    """Parse the debugwireproto mini language from *fh* into blocks.

    Yields ``(action, lines)`` pairs, where ``action`` is an unindented
    line and ``lines`` are the indented lines that followed it (leading
    indentation preserved).  A line indented deeper than the previous one
    is treated as a continuation and concatenated onto the last entry.
    Blank lines and ``#`` comments are ignored.
    """
    action = None
    body = []
    previndent = 0

    for raw in fh:
        stripped = raw.rstrip()
        # Skip blanks and comment lines entirely.
        if not stripped or stripped.startswith(b'#'):
            continue

        if stripped.startswith(b' '):
            # Indented: part of the current block.
            if not action:
                raise error.Abort(_('indented line outside of block'))

            curindent = len(stripped) - len(stripped.lstrip())
            if curindent > previndent and body:
                # Deeper indent than the last body line: continuation.
                # previndent deliberately stays put so further lines at
                # this depth keep concatenating.
                body[-1] += stripped.lstrip()
            else:
                body.append(stripped)
                previndent = curindent
            continue

        # Unindented: a new block begins; flush the previous one.
        if action:
            yield action, body
        action = stripped
        body = []
        previndent = 0

    # Flush the final block, if any.
    if action:
        yield action, body
2946
2942
2947 @command('debugwireproto',
2943 @command('debugwireproto',
2948 [
2944 [
2949 ('', 'localssh', False, _('start an SSH server for this repo')),
2945 ('', 'localssh', False, _('start an SSH server for this repo')),
2950 ('', 'peer', '', _('construct a specific version of the peer')),
2946 ('', 'peer', '', _('construct a specific version of the peer')),
2951 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2947 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2952 ('', 'nologhandshake', False,
2948 ('', 'nologhandshake', False,
2953 _('do not log I/O related to the peer handshake')),
2949 _('do not log I/O related to the peer handshake')),
2954 ] + cmdutil.remoteopts,
2950 ] + cmdutil.remoteopts,
2955 _('[PATH]'),
2951 _('[PATH]'),
2956 optionalrepo=True)
2952 optionalrepo=True)
2957 def debugwireproto(ui, repo, path=None, **opts):
2953 def debugwireproto(ui, repo, path=None, **opts):
2958 """send wire protocol commands to a server
2954 """send wire protocol commands to a server
2959
2955
2960 This command can be used to issue wire protocol commands to remote
2956 This command can be used to issue wire protocol commands to remote
2961 peers and to debug the raw data being exchanged.
2957 peers and to debug the raw data being exchanged.
2962
2958
2963 ``--localssh`` will start an SSH server against the current repository
2959 ``--localssh`` will start an SSH server against the current repository
2964 and connect to that. By default, the connection will perform a handshake
2960 and connect to that. By default, the connection will perform a handshake
2965 and establish an appropriate peer instance.
2961 and establish an appropriate peer instance.
2966
2962
2967 ``--peer`` can be used to bypass the handshake protocol and construct a
2963 ``--peer`` can be used to bypass the handshake protocol and construct a
2968 peer instance using the specified class type. Valid values are ``raw``,
2964 peer instance using the specified class type. Valid values are ``raw``,
2969 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2965 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2970 raw data payloads and don't support higher-level command actions.
2966 raw data payloads and don't support higher-level command actions.
2971
2967
2972 ``--noreadstderr`` can be used to disable automatic reading from stderr
2968 ``--noreadstderr`` can be used to disable automatic reading from stderr
2973 of the peer (for SSH connections only). Disabling automatic reading of
2969 of the peer (for SSH connections only). Disabling automatic reading of
2974 stderr is useful for making output more deterministic.
2970 stderr is useful for making output more deterministic.
2975
2971
2976 Commands are issued via a mini language which is specified via stdin.
2972 Commands are issued via a mini language which is specified via stdin.
2977 The language consists of individual actions to perform. An action is
2973 The language consists of individual actions to perform. An action is
2978 defined by a block. A block is defined as a line with no leading
2974 defined by a block. A block is defined as a line with no leading
2979 space followed by 0 or more lines with leading space. Blocks are
2975 space followed by 0 or more lines with leading space. Blocks are
2980 effectively a high-level command with additional metadata.
2976 effectively a high-level command with additional metadata.
2981
2977
2982 Lines beginning with ``#`` are ignored.
2978 Lines beginning with ``#`` are ignored.
2983
2979
2984 The following sections denote available actions.
2980 The following sections denote available actions.
2985
2981
2986 raw
2982 raw
2987 ---
2983 ---
2988
2984
2989 Send raw data to the server.
2985 Send raw data to the server.
2990
2986
2991 The block payload contains the raw data to send as one atomic send
2987 The block payload contains the raw data to send as one atomic send
2992 operation. The data may not actually be delivered in a single system
2988 operation. The data may not actually be delivered in a single system
2993 call: it depends on the abilities of the transport being used.
2989 call: it depends on the abilities of the transport being used.
2994
2990
2995 Each line in the block is de-indented and concatenated. Then, that
2991 Each line in the block is de-indented and concatenated. Then, that
2996 value is evaluated as a Python b'' literal. This allows the use of
2992 value is evaluated as a Python b'' literal. This allows the use of
2997 backslash escaping, etc.
2993 backslash escaping, etc.
2998
2994
2999 raw+
2995 raw+
3000 ----
2996 ----
3001
2997
3002 Behaves like ``raw`` except flushes output afterwards.
2998 Behaves like ``raw`` except flushes output afterwards.
3003
2999
3004 command <X>
3000 command <X>
3005 -----------
3001 -----------
3006
3002
3007 Send a request to run a named command, whose name follows the ``command``
3003 Send a request to run a named command, whose name follows the ``command``
3008 string.
3004 string.
3009
3005
3010 Arguments to the command are defined as lines in this block. The format of
3006 Arguments to the command are defined as lines in this block. The format of
3011 each line is ``<key> <value>``. e.g.::
3007 each line is ``<key> <value>``. e.g.::
3012
3008
3013 command listkeys
3009 command listkeys
3014 namespace bookmarks
3010 namespace bookmarks
3015
3011
3016 If the value begins with ``eval:``, it will be interpreted as a Python
3012 If the value begins with ``eval:``, it will be interpreted as a Python
3017 literal expression. Otherwise values are interpreted as Python b'' literals.
3013 literal expression. Otherwise values are interpreted as Python b'' literals.
3018 This allows sending complex types and encoding special byte sequences via
3014 This allows sending complex types and encoding special byte sequences via
3019 backslash escaping.
3015 backslash escaping.
3020
3016
3021 The following arguments have special meaning:
3017 The following arguments have special meaning:
3022
3018
3023 ``PUSHFILE``
3019 ``PUSHFILE``
3024 When defined, the *push* mechanism of the peer will be used instead
3020 When defined, the *push* mechanism of the peer will be used instead
3025 of the static request-response mechanism and the content of the
3021 of the static request-response mechanism and the content of the
3026 file specified in the value of this argument will be sent as the
3022 file specified in the value of this argument will be sent as the
3027 command payload.
3023 command payload.
3028
3024
3029 This can be used to submit a local bundle file to the remote.
3025 This can be used to submit a local bundle file to the remote.
3030
3026
3031 batchbegin
3027 batchbegin
3032 ----------
3028 ----------
3033
3029
3034 Instruct the peer to begin a batched send.
3030 Instruct the peer to begin a batched send.
3035
3031
3036 All ``command`` blocks are queued for execution until the next
3032 All ``command`` blocks are queued for execution until the next
3037 ``batchsubmit`` block.
3033 ``batchsubmit`` block.
3038
3034
3039 batchsubmit
3035 batchsubmit
3040 -----------
3036 -----------
3041
3037
3042 Submit previously queued ``command`` blocks as a batch request.
3038 Submit previously queued ``command`` blocks as a batch request.
3043
3039
3044 This action MUST be paired with a ``batchbegin`` action.
3040 This action MUST be paired with a ``batchbegin`` action.
3045
3041
3046 httprequest <method> <path>
3042 httprequest <method> <path>
3047 ---------------------------
3043 ---------------------------
3048
3044
3049 (HTTP peer only)
3045 (HTTP peer only)
3050
3046
3051 Send an HTTP request to the peer.
3047 Send an HTTP request to the peer.
3052
3048
3053 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3049 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3054
3050
3055 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3051 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3056 headers to add to the request. e.g. ``Accept: foo``.
3052 headers to add to the request. e.g. ``Accept: foo``.
3057
3053
3058 The following arguments are special:
3054 The following arguments are special:
3059
3055
3060 ``BODYFILE``
3056 ``BODYFILE``
3061 The content of the file defined as the value to this argument will be
3057 The content of the file defined as the value to this argument will be
3062 transferred verbatim as the HTTP request body.
3058 transferred verbatim as the HTTP request body.
3063
3059
3064 ``frame <type> <flags> <payload>``
3060 ``frame <type> <flags> <payload>``
3065 Send a unified protocol frame as part of the request body.
3061 Send a unified protocol frame as part of the request body.
3066
3062
3067 All frames will be collected and sent as the body to the HTTP
3063 All frames will be collected and sent as the body to the HTTP
3068 request.
3064 request.
3069
3065
3070 close
3066 close
3071 -----
3067 -----
3072
3068
3073 Close the connection to the server.
3069 Close the connection to the server.
3074
3070
3075 flush
3071 flush
3076 -----
3072 -----
3077
3073
3078 Flush data written to the server.
3074 Flush data written to the server.
3079
3075
3080 readavailable
3076 readavailable
3081 -------------
3077 -------------
3082
3078
3083 Close the write end of the connection and read all available data from
3079 Close the write end of the connection and read all available data from
3084 the server.
3080 the server.
3085
3081
3086 If the connection to the server encompasses multiple pipes, we poll both
3082 If the connection to the server encompasses multiple pipes, we poll both
3087 pipes and read available data.
3083 pipes and read available data.
3088
3084
3089 readline
3085 readline
3090 --------
3086 --------
3091
3087
3092 Read a line of output from the server. If there are multiple output
3088 Read a line of output from the server. If there are multiple output
3093 pipes, reads only the main pipe.
3089 pipes, reads only the main pipe.
3094
3090
3095 ereadline
3091 ereadline
3096 ---------
3092 ---------
3097
3093
3098 Like ``readline``, but read from the stderr pipe, if available.
3094 Like ``readline``, but read from the stderr pipe, if available.
3099
3095
3100 read <X>
3096 read <X>
3101 --------
3097 --------
3102
3098
3103 ``read()`` N bytes from the server's main output pipe.
3099 ``read()`` N bytes from the server's main output pipe.
3104
3100
3105 eread <X>
3101 eread <X>
3106 ---------
3102 ---------
3107
3103
3108 ``read()`` N bytes from the server's stderr pipe, if available.
3104 ``read()`` N bytes from the server's stderr pipe, if available.
3109
3105
3110 Specifying Unified Frame-Based Protocol Frames
3106 Specifying Unified Frame-Based Protocol Frames
3111 ----------------------------------------------
3107 ----------------------------------------------
3112
3108
3113 It is possible to emit a *Unified Frame-Based Protocol* by using special
3109 It is possible to emit a *Unified Frame-Based Protocol* by using special
3114 syntax.
3110 syntax.
3115
3111
3116 A frame is composed as a type, flags, and payload. These can be parsed
3112 A frame is composed as a type, flags, and payload. These can be parsed
3117 from a string of the form:
3113 from a string of the form:
3118
3114
3119 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3115 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3120
3116
3121 ``request-id`` and ``stream-id`` are integers defining the request and
3117 ``request-id`` and ``stream-id`` are integers defining the request and
3122 stream identifiers.
3118 stream identifiers.
3123
3119
3124 ``type`` can be an integer value for the frame type or the string name
3120 ``type`` can be an integer value for the frame type or the string name
3125 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3121 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3126 ``command-name``.
3122 ``command-name``.
3127
3123
3128 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3124 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3129 components. Each component (and there can be just one) can be an integer
3125 components. Each component (and there can be just one) can be an integer
3130 or a flag name for stream flags or frame flags, respectively. Values are
3126 or a flag name for stream flags or frame flags, respectively. Values are
3131 resolved to integers and then bitwise OR'd together.
3127 resolved to integers and then bitwise OR'd together.
3132
3128
3133 ``payload`` represents the raw frame payload. If it begins with
3129 ``payload`` represents the raw frame payload. If it begins with
3134 ``cbor:``, the following string is evaluated as Python code and the
3130 ``cbor:``, the following string is evaluated as Python code and the
3135 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3131 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3136 as a Python byte string literal.
3132 as a Python byte string literal.
3137 """
3133 """
3138 opts = pycompat.byteskwargs(opts)
3134 opts = pycompat.byteskwargs(opts)
3139
3135
3140 if opts['localssh'] and not repo:
3136 if opts['localssh'] and not repo:
3141 raise error.Abort(_('--localssh requires a repository'))
3137 raise error.Abort(_('--localssh requires a repository'))
3142
3138
3143 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3139 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3144 raise error.Abort(_('invalid value for --peer'),
3140 raise error.Abort(_('invalid value for --peer'),
3145 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3141 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3146
3142
3147 if path and opts['localssh']:
3143 if path and opts['localssh']:
3148 raise error.Abort(_('cannot specify --localssh with an explicit '
3144 raise error.Abort(_('cannot specify --localssh with an explicit '
3149 'path'))
3145 'path'))
3150
3146
3151 if ui.interactive():
3147 if ui.interactive():
3152 ui.write(_('(waiting for commands on stdin)\n'))
3148 ui.write(_('(waiting for commands on stdin)\n'))
3153
3149
3154 blocks = list(_parsewirelangblocks(ui.fin))
3150 blocks = list(_parsewirelangblocks(ui.fin))
3155
3151
3156 proc = None
3152 proc = None
3157 stdin = None
3153 stdin = None
3158 stdout = None
3154 stdout = None
3159 stderr = None
3155 stderr = None
3160 opener = None
3156 opener = None
3161
3157
3162 if opts['localssh']:
3158 if opts['localssh']:
3163 # We start the SSH server in its own process so there is process
3159 # We start the SSH server in its own process so there is process
3164 # separation. This prevents a whole class of potential bugs around
3160 # separation. This prevents a whole class of potential bugs around
3165 # shared state from interfering with server operation.
3161 # shared state from interfering with server operation.
3166 args = procutil.hgcmd() + [
3162 args = procutil.hgcmd() + [
3167 '-R', repo.root,
3163 '-R', repo.root,
3168 'debugserve', '--sshstdio',
3164 'debugserve', '--sshstdio',
3169 ]
3165 ]
3170 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3166 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3171 stdin=subprocess.PIPE,
3167 stdin=subprocess.PIPE,
3172 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3168 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3173 bufsize=0)
3169 bufsize=0)
3174
3170
3175 stdin = proc.stdin
3171 stdin = proc.stdin
3176 stdout = proc.stdout
3172 stdout = proc.stdout
3177 stderr = proc.stderr
3173 stderr = proc.stderr
3178
3174
3179 # We turn the pipes into observers so we can log I/O.
3175 # We turn the pipes into observers so we can log I/O.
3180 if ui.verbose or opts['peer'] == 'raw':
3176 if ui.verbose or opts['peer'] == 'raw':
3181 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3177 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3182 logdata=True)
3178 logdata=True)
3183 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3179 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3184 logdata=True)
3180 logdata=True)
3185 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3181 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3186 logdata=True)
3182 logdata=True)
3187
3183
3188 # --localssh also implies the peer connection settings.
3184 # --localssh also implies the peer connection settings.
3189
3185
3190 url = 'ssh://localserver'
3186 url = 'ssh://localserver'
3191 autoreadstderr = not opts['noreadstderr']
3187 autoreadstderr = not opts['noreadstderr']
3192
3188
3193 if opts['peer'] == 'ssh1':
3189 if opts['peer'] == 'ssh1':
3194 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3190 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3195 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3191 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3196 None, autoreadstderr=autoreadstderr)
3192 None, autoreadstderr=autoreadstderr)
3197 elif opts['peer'] == 'ssh2':
3193 elif opts['peer'] == 'ssh2':
3198 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3194 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3199 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3195 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3200 None, autoreadstderr=autoreadstderr)
3196 None, autoreadstderr=autoreadstderr)
3201 elif opts['peer'] == 'raw':
3197 elif opts['peer'] == 'raw':
3202 ui.write(_('using raw connection to peer\n'))
3198 ui.write(_('using raw connection to peer\n'))
3203 peer = None
3199 peer = None
3204 else:
3200 else:
3205 ui.write(_('creating ssh peer from handshake results\n'))
3201 ui.write(_('creating ssh peer from handshake results\n'))
3206 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3202 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3207 autoreadstderr=autoreadstderr)
3203 autoreadstderr=autoreadstderr)
3208
3204
3209 elif path:
3205 elif path:
3210 # We bypass hg.peer() so we can proxy the sockets.
3206 # We bypass hg.peer() so we can proxy the sockets.
3211 # TODO consider not doing this because we skip
3207 # TODO consider not doing this because we skip
3212 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3208 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3213 u = util.url(path)
3209 u = util.url(path)
3214 if u.scheme != 'http':
3210 if u.scheme != 'http':
3215 raise error.Abort(_('only http:// paths are currently supported'))
3211 raise error.Abort(_('only http:// paths are currently supported'))
3216
3212
3217 url, authinfo = u.authinfo()
3213 url, authinfo = u.authinfo()
3218 openerargs = {
3214 openerargs = {
3219 r'useragent': b'Mercurial debugwireproto',
3215 r'useragent': b'Mercurial debugwireproto',
3220 }
3216 }
3221
3217
3222 # Turn pipes/sockets into observers so we can log I/O.
3218 # Turn pipes/sockets into observers so we can log I/O.
3223 if ui.verbose:
3219 if ui.verbose:
3224 openerargs.update({
3220 openerargs.update({
3225 r'loggingfh': ui,
3221 r'loggingfh': ui,
3226 r'loggingname': b's',
3222 r'loggingname': b's',
3227 r'loggingopts': {
3223 r'loggingopts': {
3228 r'logdata': True,
3224 r'logdata': True,
3229 r'logdataapis': False,
3225 r'logdataapis': False,
3230 },
3226 },
3231 })
3227 })
3232
3228
3233 if ui.debugflag:
3229 if ui.debugflag:
3234 openerargs[r'loggingopts'][r'logdataapis'] = True
3230 openerargs[r'loggingopts'][r'logdataapis'] = True
3235
3231
3236 # Don't send default headers when in raw mode. This allows us to
3232 # Don't send default headers when in raw mode. This allows us to
3237 # bypass most of the behavior of our URL handling code so we can
3233 # bypass most of the behavior of our URL handling code so we can
3238 # have near complete control over what's sent on the wire.
3234 # have near complete control over what's sent on the wire.
3239 if opts['peer'] == 'raw':
3235 if opts['peer'] == 'raw':
3240 openerargs[r'sendaccept'] = False
3236 openerargs[r'sendaccept'] = False
3241
3237
3242 opener = urlmod.opener(ui, authinfo, **openerargs)
3238 opener = urlmod.opener(ui, authinfo, **openerargs)
3243
3239
3244 if opts['peer'] == 'http2':
3240 if opts['peer'] == 'http2':
3245 ui.write(_('creating http peer for wire protocol version 2\n'))
3241 ui.write(_('creating http peer for wire protocol version 2\n'))
3246 # We go through makepeer() because we need an API descriptor for
3242 # We go through makepeer() because we need an API descriptor for
3247 # the peer instance to be useful.
3243 # the peer instance to be useful.
3248 with ui.configoverride({
3244 with ui.configoverride({
3249 ('experimental', 'httppeer.advertise-v2'): True}):
3245 ('experimental', 'httppeer.advertise-v2'): True}):
3250 if opts['nologhandshake']:
3246 if opts['nologhandshake']:
3251 ui.pushbuffer()
3247 ui.pushbuffer()
3252
3248
3253 peer = httppeer.makepeer(ui, path, opener=opener)
3249 peer = httppeer.makepeer(ui, path, opener=opener)
3254
3250
3255 if opts['nologhandshake']:
3251 if opts['nologhandshake']:
3256 ui.popbuffer()
3252 ui.popbuffer()
3257
3253
3258 if not isinstance(peer, httppeer.httpv2peer):
3254 if not isinstance(peer, httppeer.httpv2peer):
3259 raise error.Abort(_('could not instantiate HTTP peer for '
3255 raise error.Abort(_('could not instantiate HTTP peer for '
3260 'wire protocol version 2'),
3256 'wire protocol version 2'),
3261 hint=_('the server may not have the feature '
3257 hint=_('the server may not have the feature '
3262 'enabled or is not allowing this '
3258 'enabled or is not allowing this '
3263 'client version'))
3259 'client version'))
3264
3260
3265 elif opts['peer'] == 'raw':
3261 elif opts['peer'] == 'raw':
3266 ui.write(_('using raw connection to peer\n'))
3262 ui.write(_('using raw connection to peer\n'))
3267 peer = None
3263 peer = None
3268 elif opts['peer']:
3264 elif opts['peer']:
3269 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3265 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3270 opts['peer'])
3266 opts['peer'])
3271 else:
3267 else:
3272 peer = httppeer.makepeer(ui, path, opener=opener)
3268 peer = httppeer.makepeer(ui, path, opener=opener)
3273
3269
3274 # We /could/ populate stdin/stdout with sock.makefile()...
3270 # We /could/ populate stdin/stdout with sock.makefile()...
3275 else:
3271 else:
3276 raise error.Abort(_('unsupported connection configuration'))
3272 raise error.Abort(_('unsupported connection configuration'))
3277
3273
3278 batchedcommands = None
3274 batchedcommands = None
3279
3275
3280 # Now perform actions based on the parsed wire language instructions.
3276 # Now perform actions based on the parsed wire language instructions.
3281 for action, lines in blocks:
3277 for action, lines in blocks:
3282 if action in ('raw', 'raw+'):
3278 if action in ('raw', 'raw+'):
3283 if not stdin:
3279 if not stdin:
3284 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3280 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3285
3281
3286 # Concatenate the data together.
3282 # Concatenate the data together.
3287 data = ''.join(l.lstrip() for l in lines)
3283 data = ''.join(l.lstrip() for l in lines)
3288 data = stringutil.unescapestr(data)
3284 data = stringutil.unescapestr(data)
3289 stdin.write(data)
3285 stdin.write(data)
3290
3286
3291 if action == 'raw+':
3287 if action == 'raw+':
3292 stdin.flush()
3288 stdin.flush()
3293 elif action == 'flush':
3289 elif action == 'flush':
3294 if not stdin:
3290 if not stdin:
3295 raise error.Abort(_('cannot call flush on this peer'))
3291 raise error.Abort(_('cannot call flush on this peer'))
3296 stdin.flush()
3292 stdin.flush()
3297 elif action.startswith('command'):
3293 elif action.startswith('command'):
3298 if not peer:
3294 if not peer:
3299 raise error.Abort(_('cannot send commands unless peer instance '
3295 raise error.Abort(_('cannot send commands unless peer instance '
3300 'is available'))
3296 'is available'))
3301
3297
3302 command = action.split(' ', 1)[1]
3298 command = action.split(' ', 1)[1]
3303
3299
3304 args = {}
3300 args = {}
3305 for line in lines:
3301 for line in lines:
3306 # We need to allow empty values.
3302 # We need to allow empty values.
3307 fields = line.lstrip().split(' ', 1)
3303 fields = line.lstrip().split(' ', 1)
3308 if len(fields) == 1:
3304 if len(fields) == 1:
3309 key = fields[0]
3305 key = fields[0]
3310 value = ''
3306 value = ''
3311 else:
3307 else:
3312 key, value = fields
3308 key, value = fields
3313
3309
3314 if value.startswith('eval:'):
3310 if value.startswith('eval:'):
3315 value = stringutil.evalpythonliteral(value[5:])
3311 value = stringutil.evalpythonliteral(value[5:])
3316 else:
3312 else:
3317 value = stringutil.unescapestr(value)
3313 value = stringutil.unescapestr(value)
3318
3314
3319 args[key] = value
3315 args[key] = value
3320
3316
3321 if batchedcommands is not None:
3317 if batchedcommands is not None:
3322 batchedcommands.append((command, args))
3318 batchedcommands.append((command, args))
3323 continue
3319 continue
3324
3320
3325 ui.status(_('sending %s command\n') % command)
3321 ui.status(_('sending %s command\n') % command)
3326
3322
3327 if 'PUSHFILE' in args:
3323 if 'PUSHFILE' in args:
3328 with open(args['PUSHFILE'], r'rb') as fh:
3324 with open(args['PUSHFILE'], r'rb') as fh:
3329 del args['PUSHFILE']
3325 del args['PUSHFILE']
3330 res, output = peer._callpush(command, fh,
3326 res, output = peer._callpush(command, fh,
3331 **pycompat.strkwargs(args))
3327 **pycompat.strkwargs(args))
3332 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3328 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3333 ui.status(_('remote output: %s\n') %
3329 ui.status(_('remote output: %s\n') %
3334 stringutil.escapestr(output))
3330 stringutil.escapestr(output))
3335 else:
3331 else:
3336 with peer.commandexecutor() as e:
3332 with peer.commandexecutor() as e:
3337 res = e.callcommand(command, args).result()
3333 res = e.callcommand(command, args).result()
3338
3334
3339 if isinstance(res, wireprotov2peer.commandresponse):
3335 if isinstance(res, wireprotov2peer.commandresponse):
3340 val = res.objects()
3336 val = res.objects()
3341 ui.status(_('response: %s\n') %
3337 ui.status(_('response: %s\n') %
3342 stringutil.pprint(val, bprefix=True, indent=2))
3338 stringutil.pprint(val, bprefix=True, indent=2))
3343 else:
3339 else:
3344 ui.status(_('response: %s\n') %
3340 ui.status(_('response: %s\n') %
3345 stringutil.pprint(res, bprefix=True, indent=2))
3341 stringutil.pprint(res, bprefix=True, indent=2))
3346
3342
3347 elif action == 'batchbegin':
3343 elif action == 'batchbegin':
3348 if batchedcommands is not None:
3344 if batchedcommands is not None:
3349 raise error.Abort(_('nested batchbegin not allowed'))
3345 raise error.Abort(_('nested batchbegin not allowed'))
3350
3346
3351 batchedcommands = []
3347 batchedcommands = []
3352 elif action == 'batchsubmit':
3348 elif action == 'batchsubmit':
3353 # There is a batching API we could go through. But it would be
3349 # There is a batching API we could go through. But it would be
3354 # difficult to normalize requests into function calls. It is easier
3350 # difficult to normalize requests into function calls. It is easier
3355 # to bypass this layer and normalize to commands + args.
3351 # to bypass this layer and normalize to commands + args.
3356 ui.status(_('sending batch with %d sub-commands\n') %
3352 ui.status(_('sending batch with %d sub-commands\n') %
3357 len(batchedcommands))
3353 len(batchedcommands))
3358 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3354 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3359 ui.status(_('response #%d: %s\n') %
3355 ui.status(_('response #%d: %s\n') %
3360 (i, stringutil.escapestr(chunk)))
3356 (i, stringutil.escapestr(chunk)))
3361
3357
3362 batchedcommands = None
3358 batchedcommands = None
3363
3359
3364 elif action.startswith('httprequest '):
3360 elif action.startswith('httprequest '):
3365 if not opener:
3361 if not opener:
3366 raise error.Abort(_('cannot use httprequest without an HTTP '
3362 raise error.Abort(_('cannot use httprequest without an HTTP '
3367 'peer'))
3363 'peer'))
3368
3364
3369 request = action.split(' ', 2)
3365 request = action.split(' ', 2)
3370 if len(request) != 3:
3366 if len(request) != 3:
3371 raise error.Abort(_('invalid httprequest: expected format is '
3367 raise error.Abort(_('invalid httprequest: expected format is '
3372 '"httprequest <method> <path>'))
3368 '"httprequest <method> <path>'))
3373
3369
3374 method, httppath = request[1:]
3370 method, httppath = request[1:]
3375 headers = {}
3371 headers = {}
3376 body = None
3372 body = None
3377 frames = []
3373 frames = []
3378 for line in lines:
3374 for line in lines:
3379 line = line.lstrip()
3375 line = line.lstrip()
3380 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3376 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3381 if m:
3377 if m:
3382 # Headers need to use native strings.
3378 # Headers need to use native strings.
3383 key = pycompat.strurl(m.group(1))
3379 key = pycompat.strurl(m.group(1))
3384 value = pycompat.strurl(m.group(2))
3380 value = pycompat.strurl(m.group(2))
3385 headers[key] = value
3381 headers[key] = value
3386 continue
3382 continue
3387
3383
3388 if line.startswith(b'BODYFILE '):
3384 if line.startswith(b'BODYFILE '):
3389 with open(line.split(b' ', 1), 'rb') as fh:
3385 with open(line.split(b' ', 1), 'rb') as fh:
3390 body = fh.read()
3386 body = fh.read()
3391 elif line.startswith(b'frame '):
3387 elif line.startswith(b'frame '):
3392 frame = wireprotoframing.makeframefromhumanstring(
3388 frame = wireprotoframing.makeframefromhumanstring(
3393 line[len(b'frame '):])
3389 line[len(b'frame '):])
3394
3390
3395 frames.append(frame)
3391 frames.append(frame)
3396 else:
3392 else:
3397 raise error.Abort(_('unknown argument to httprequest: %s') %
3393 raise error.Abort(_('unknown argument to httprequest: %s') %
3398 line)
3394 line)
3399
3395
3400 url = path + httppath
3396 url = path + httppath
3401
3397
3402 if frames:
3398 if frames:
3403 body = b''.join(bytes(f) for f in frames)
3399 body = b''.join(bytes(f) for f in frames)
3404
3400
3405 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3401 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3406
3402
3407 # urllib.Request insists on using has_data() as a proxy for
3403 # urllib.Request insists on using has_data() as a proxy for
3408 # determining the request method. Override that to use our
3404 # determining the request method. Override that to use our
3409 # explicitly requested method.
3405 # explicitly requested method.
3410 req.get_method = lambda: pycompat.sysstr(method)
3406 req.get_method = lambda: pycompat.sysstr(method)
3411
3407
3412 try:
3408 try:
3413 res = opener.open(req)
3409 res = opener.open(req)
3414 body = res.read()
3410 body = res.read()
3415 except util.urlerr.urlerror as e:
3411 except util.urlerr.urlerror as e:
3416 # read() method must be called, but only exists in Python 2
3412 # read() method must be called, but only exists in Python 2
3417 getattr(e, 'read', lambda: None)()
3413 getattr(e, 'read', lambda: None)()
3418 continue
3414 continue
3419
3415
3420 ct = res.headers.get(r'Content-Type')
3416 ct = res.headers.get(r'Content-Type')
3421 if ct == r'application/mercurial-cbor':
3417 if ct == r'application/mercurial-cbor':
3422 ui.write(_('cbor> %s\n') %
3418 ui.write(_('cbor> %s\n') %
3423 stringutil.pprint(cborutil.decodeall(body),
3419 stringutil.pprint(cborutil.decodeall(body),
3424 bprefix=True,
3420 bprefix=True,
3425 indent=2))
3421 indent=2))
3426
3422
3427 elif action == 'close':
3423 elif action == 'close':
3428 peer.close()
3424 peer.close()
3429 elif action == 'readavailable':
3425 elif action == 'readavailable':
3430 if not stdout or not stderr:
3426 if not stdout or not stderr:
3431 raise error.Abort(_('readavailable not available on this peer'))
3427 raise error.Abort(_('readavailable not available on this peer'))
3432
3428
3433 stdin.close()
3429 stdin.close()
3434 stdout.read()
3430 stdout.read()
3435 stderr.read()
3431 stderr.read()
3436
3432
3437 elif action == 'readline':
3433 elif action == 'readline':
3438 if not stdout:
3434 if not stdout:
3439 raise error.Abort(_('readline not available on this peer'))
3435 raise error.Abort(_('readline not available on this peer'))
3440 stdout.readline()
3436 stdout.readline()
3441 elif action == 'ereadline':
3437 elif action == 'ereadline':
3442 if not stderr:
3438 if not stderr:
3443 raise error.Abort(_('ereadline not available on this peer'))
3439 raise error.Abort(_('ereadline not available on this peer'))
3444 stderr.readline()
3440 stderr.readline()
3445 elif action.startswith('read '):
3441 elif action.startswith('read '):
3446 count = int(action.split(' ', 1)[1])
3442 count = int(action.split(' ', 1)[1])
3447 if not stdout:
3443 if not stdout:
3448 raise error.Abort(_('read not available on this peer'))
3444 raise error.Abort(_('read not available on this peer'))
3449 stdout.read(count)
3445 stdout.read(count)
3450 elif action.startswith('eread '):
3446 elif action.startswith('eread '):
3451 count = int(action.split(' ', 1)[1])
3447 count = int(action.split(' ', 1)[1])
3452 if not stderr:
3448 if not stderr:
3453 raise error.Abort(_('eread not available on this peer'))
3449 raise error.Abort(_('eread not available on this peer'))
3454 stderr.read(count)
3450 stderr.read(count)
3455 else:
3451 else:
3456 raise error.Abort(_('unknown action: %s') % action)
3452 raise error.Abort(_('unknown action: %s') % action)
3457
3453
3458 if batchedcommands is not None:
3454 if batchedcommands is not None:
3459 raise error.Abort(_('unclosed "batchbegin" request'))
3455 raise error.Abort(_('unclosed "batchbegin" request'))
3460
3456
3461 if peer:
3457 if peer:
3462 peer.close()
3458 peer.close()
3463
3459
3464 if proc:
3460 if proc:
3465 proc.kill()
3461 proc.kill()
@@ -1,1050 +1,1045 b''
1
1
2 Function to test discovery between two repos in both directions, using both the local shortcut
2 Function to test discovery between two repos in both directions, using both the local shortcut
3 (which is currently not activated by default) and the full remotable protocol:
3 (which is currently not activated by default) and the full remotable protocol:
4
4
5 $ testdesc() { # revs_a, revs_b, dagdesc
5 $ testdesc() { # revs_a, revs_b, dagdesc
6 > if [ -d foo ]; then rm -rf foo; fi
6 > if [ -d foo ]; then rm -rf foo; fi
7 > hg init foo
7 > hg init foo
8 > cd foo
8 > cd foo
9 > hg debugbuilddag "$3"
9 > hg debugbuilddag "$3"
10 > hg clone . a $1 --quiet
10 > hg clone . a $1 --quiet
11 > hg clone . b $2 --quiet
11 > hg clone . b $2 --quiet
12 > echo
12 > echo
13 > echo "% -- a -> b tree"
13 > echo "% -- a -> b tree"
14 > hg -R a debugdiscovery b --verbose --old
14 > hg -R a debugdiscovery b --verbose --old
15 > echo
15 > echo
16 > echo "% -- a -> b set"
16 > echo "% -- a -> b set"
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
17 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true
18 > echo
18 > echo
19 > echo "% -- a -> b set (tip only)"
19 > echo "% -- a -> b set (tip only)"
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
20 > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true --rev tip
21 > echo
21 > echo
22 > echo "% -- b -> a tree"
22 > echo "% -- b -> a tree"
23 > hg -R b debugdiscovery a --verbose --old
23 > hg -R b debugdiscovery a --verbose --old
24 > echo
24 > echo
25 > echo "% -- b -> a set"
25 > echo "% -- b -> a set"
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
26 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true
27 > echo
27 > echo
28 > echo "% -- b -> a set (tip only)"
28 > echo "% -- b -> a set (tip only)"
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
29 > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true --rev tip
30 > cd ..
30 > cd ..
31 > }
31 > }
32
32
33
33
34 Small superset:
34 Small superset:
35
35
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
36 $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' '
37 > +2:f +1:a1:b1
37 > +2:f +1:a1:b1
38 > <f +4 :a2
38 > <f +4 :a2
39 > +5 :b2
39 > +5 :b2
40 > <f +3 :b3'
40 > <f +3 :b3'
41
41
42 % -- a -> b tree
42 % -- a -> b tree
43 comparing with b
43 comparing with b
44 searching for changes
44 searching for changes
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
45 unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0
46 heads summary:
46 heads summary:
47 total common heads: 2
47 total common heads: 2
48 also local heads: 2
48 also local heads: 2
49 also remote heads: 1
49 also remote heads: 1
50 local heads: 2
50 local heads: 2
51 common: 2
51 common: 2
52 missing: 0
52 missing: 0
53 remote heads: 3
53 remote heads: 3
54 common: 1
54 common: 1
55 unknown: 2
55 unknown: 2
56 local changesets: 7
56 local changesets: 7
57 common: 7
57 common: 7
58 missing: 0
58 missing: 0
59 common heads: 01241442b3c2 b5714e113bc0
59 common heads: 01241442b3c2 b5714e113bc0
60 local is subset
61
60
62 % -- a -> b set
61 % -- a -> b set
63 comparing with b
62 comparing with b
64 query 1; heads
63 query 1; heads
65 searching for changes
64 searching for changes
66 all local heads known remotely
65 all local heads known remotely
67 heads summary:
66 heads summary:
68 total common heads: 2
67 total common heads: 2
69 also local heads: 2
68 also local heads: 2
70 also remote heads: 1
69 also remote heads: 1
71 local heads: 2
70 local heads: 2
72 common: 2
71 common: 2
73 missing: 0
72 missing: 0
74 remote heads: 3
73 remote heads: 3
75 common: 1
74 common: 1
76 unknown: 2
75 unknown: 2
77 local changesets: 7
76 local changesets: 7
78 common: 7
77 common: 7
79 missing: 0
78 missing: 0
80 common heads: 01241442b3c2 b5714e113bc0
79 common heads: 01241442b3c2 b5714e113bc0
81 local is subset
82
80
83 % -- a -> b set (tip only)
81 % -- a -> b set (tip only)
84 comparing with b
82 comparing with b
85 query 1; heads
83 query 1; heads
86 searching for changes
84 searching for changes
87 all local heads known remotely
85 all local heads known remotely
88 heads summary:
86 heads summary:
89 total common heads: 1
87 total common heads: 1
90 also local heads: 1
88 also local heads: 1
91 also remote heads: 0
89 also remote heads: 0
92 local heads: 2
90 local heads: 2
93 common: 1
91 common: 1
94 missing: 1
92 missing: 1
95 remote heads: 3
93 remote heads: 3
96 common: 0
94 common: 0
97 unknown: 3
95 unknown: 3
98 local changesets: 7
96 local changesets: 7
99 common: 6
97 common: 6
100 missing: 1
98 missing: 1
101 common heads: b5714e113bc0
99 common heads: b5714e113bc0
102
100
103 % -- b -> a tree
101 % -- b -> a tree
104 comparing with a
102 comparing with a
105 searching for changes
103 searching for changes
106 unpruned common: 01241442b3c2 b5714e113bc0
104 unpruned common: 01241442b3c2 b5714e113bc0
107 heads summary:
105 heads summary:
108 total common heads: 2
106 total common heads: 2
109 also local heads: 1
107 also local heads: 1
110 also remote heads: 2
108 also remote heads: 2
111 local heads: 3
109 local heads: 3
112 common: 1
110 common: 1
113 missing: 2
111 missing: 2
114 remote heads: 2
112 remote heads: 2
115 common: 2
113 common: 2
116 unknown: 0
114 unknown: 0
117 local changesets: 15
115 local changesets: 15
118 common: 7
116 common: 7
119 missing: 8
117 missing: 8
120 common heads: 01241442b3c2 b5714e113bc0
118 common heads: 01241442b3c2 b5714e113bc0
121 remote is subset
122
119
123 % -- b -> a set
120 % -- b -> a set
124 comparing with a
121 comparing with a
125 query 1; heads
122 query 1; heads
126 searching for changes
123 searching for changes
127 all remote heads known locally
124 all remote heads known locally
128 heads summary:
125 heads summary:
129 total common heads: 2
126 total common heads: 2
130 also local heads: 1
127 also local heads: 1
131 also remote heads: 2
128 also remote heads: 2
132 local heads: 3
129 local heads: 3
133 common: 1
130 common: 1
134 missing: 2
131 missing: 2
135 remote heads: 2
132 remote heads: 2
136 common: 2
133 common: 2
137 unknown: 0
134 unknown: 0
138 local changesets: 15
135 local changesets: 15
139 common: 7
136 common: 7
140 missing: 8
137 missing: 8
141 common heads: 01241442b3c2 b5714e113bc0
138 common heads: 01241442b3c2 b5714e113bc0
142 remote is subset
143
139
144 % -- b -> a set (tip only)
140 % -- b -> a set (tip only)
145 comparing with a
141 comparing with a
146 query 1; heads
142 query 1; heads
147 searching for changes
143 searching for changes
148 all remote heads known locally
144 all remote heads known locally
149 heads summary:
145 heads summary:
150 total common heads: 2
146 total common heads: 2
151 also local heads: 1
147 also local heads: 1
152 also remote heads: 2
148 also remote heads: 2
153 local heads: 3
149 local heads: 3
154 common: 1
150 common: 1
155 missing: 2
151 missing: 2
156 remote heads: 2
152 remote heads: 2
157 common: 2
153 common: 2
158 unknown: 0
154 unknown: 0
159 local changesets: 15
155 local changesets: 15
160 common: 7
156 common: 7
161 missing: 8
157 missing: 8
162 common heads: 01241442b3c2 b5714e113bc0
158 common heads: 01241442b3c2 b5714e113bc0
163 remote is subset
164
159
165
160
166 Many new:
161 Many new:
167
162
168 $ testdesc '-ra1 -ra2' '-rb' '
163 $ testdesc '-ra1 -ra2' '-rb' '
169 > +2:f +3:a1 +3:b
164 > +2:f +3:a1 +3:b
170 > <f +30 :a2'
165 > <f +30 :a2'
171
166
172 % -- a -> b tree
167 % -- a -> b tree
173 comparing with b
168 comparing with b
174 searching for changes
169 searching for changes
175 unpruned common: bebd167eb94d
170 unpruned common: bebd167eb94d
176 heads summary:
171 heads summary:
177 total common heads: 1
172 total common heads: 1
178 also local heads: 1
173 also local heads: 1
179 also remote heads: 0
174 also remote heads: 0
180 local heads: 2
175 local heads: 2
181 common: 1
176 common: 1
182 missing: 1
177 missing: 1
183 remote heads: 1
178 remote heads: 1
184 common: 0
179 common: 0
185 unknown: 1
180 unknown: 1
186 local changesets: 35
181 local changesets: 35
187 common: 5
182 common: 5
188 missing: 30
183 missing: 30
189 common heads: bebd167eb94d
184 common heads: bebd167eb94d
190
185
191 % -- a -> b set
186 % -- a -> b set
192 comparing with b
187 comparing with b
193 query 1; heads
188 query 1; heads
194 searching for changes
189 searching for changes
195 taking initial sample
190 taking initial sample
196 searching: 2 queries
191 searching: 2 queries
197 query 2; still undecided: 29, sample size is: 29
192 query 2; still undecided: 29, sample size is: 29
198 2 total queries in *.????s (glob)
193 2 total queries in *.????s (glob)
199 heads summary:
194 heads summary:
200 total common heads: 1
195 total common heads: 1
201 also local heads: 1
196 also local heads: 1
202 also remote heads: 0
197 also remote heads: 0
203 local heads: 2
198 local heads: 2
204 common: 1
199 common: 1
205 missing: 1
200 missing: 1
206 remote heads: 1
201 remote heads: 1
207 common: 0
202 common: 0
208 unknown: 1
203 unknown: 1
209 local changesets: 35
204 local changesets: 35
210 common: 5
205 common: 5
211 missing: 30
206 missing: 30
212 common heads: bebd167eb94d
207 common heads: bebd167eb94d
213
208
214 % -- a -> b set (tip only)
209 % -- a -> b set (tip only)
215 comparing with b
210 comparing with b
216 query 1; heads
211 query 1; heads
217 searching for changes
212 searching for changes
218 taking quick initial sample
213 taking quick initial sample
219 searching: 2 queries
214 searching: 2 queries
220 query 2; still undecided: 31, sample size is: 31
215 query 2; still undecided: 31, sample size is: 31
221 2 total queries in *.????s (glob)
216 2 total queries in *.????s (glob)
222 heads summary:
217 heads summary:
223 total common heads: 1
218 total common heads: 1
224 also local heads: 0
219 also local heads: 0
225 also remote heads: 0
220 also remote heads: 0
226 local heads: 2
221 local heads: 2
227 common: 0
222 common: 0
228 missing: 2
223 missing: 2
229 remote heads: 1
224 remote heads: 1
230 common: 0
225 common: 0
231 unknown: 1
226 unknown: 1
232 local changesets: 35
227 local changesets: 35
233 common: 2
228 common: 2
234 missing: 33
229 missing: 33
235 common heads: 66f7d451a68b
230 common heads: 66f7d451a68b
236
231
237 % -- b -> a tree
232 % -- b -> a tree
238 comparing with a
233 comparing with a
239 searching for changes
234 searching for changes
240 unpruned common: 66f7d451a68b bebd167eb94d
235 unpruned common: 66f7d451a68b bebd167eb94d
241 heads summary:
236 heads summary:
242 total common heads: 1
237 total common heads: 1
243 also local heads: 0
238 also local heads: 0
244 also remote heads: 1
239 also remote heads: 1
245 local heads: 1
240 local heads: 1
246 common: 0
241 common: 0
247 missing: 1
242 missing: 1
248 remote heads: 2
243 remote heads: 2
249 common: 1
244 common: 1
250 unknown: 1
245 unknown: 1
251 local changesets: 8
246 local changesets: 8
252 common: 5
247 common: 5
253 missing: 3
248 missing: 3
254 common heads: bebd167eb94d
249 common heads: bebd167eb94d
255
250
256 % -- b -> a set
251 % -- b -> a set
257 comparing with a
252 comparing with a
258 query 1; heads
253 query 1; heads
259 searching for changes
254 searching for changes
260 taking initial sample
255 taking initial sample
261 searching: 2 queries
256 searching: 2 queries
262 query 2; still undecided: 2, sample size is: 2
257 query 2; still undecided: 2, sample size is: 2
263 2 total queries in *.????s (glob)
258 2 total queries in *.????s (glob)
264 heads summary:
259 heads summary:
265 total common heads: 1
260 total common heads: 1
266 also local heads: 0
261 also local heads: 0
267 also remote heads: 1
262 also remote heads: 1
268 local heads: 1
263 local heads: 1
269 common: 0
264 common: 0
270 missing: 1
265 missing: 1
271 remote heads: 2
266 remote heads: 2
272 common: 1
267 common: 1
273 unknown: 1
268 unknown: 1
274 local changesets: 8
269 local changesets: 8
275 common: 5
270 common: 5
276 missing: 3
271 missing: 3
277 common heads: bebd167eb94d
272 common heads: bebd167eb94d
278
273
279 % -- b -> a set (tip only)
274 % -- b -> a set (tip only)
280 comparing with a
275 comparing with a
281 query 1; heads
276 query 1; heads
282 searching for changes
277 searching for changes
283 taking initial sample
278 taking initial sample
284 searching: 2 queries
279 searching: 2 queries
285 query 2; still undecided: 2, sample size is: 2
280 query 2; still undecided: 2, sample size is: 2
286 2 total queries in *.????s (glob)
281 2 total queries in *.????s (glob)
287 heads summary:
282 heads summary:
288 total common heads: 1
283 total common heads: 1
289 also local heads: 0
284 also local heads: 0
290 also remote heads: 1
285 also remote heads: 1
291 local heads: 1
286 local heads: 1
292 common: 0
287 common: 0
293 missing: 1
288 missing: 1
294 remote heads: 2
289 remote heads: 2
295 common: 1
290 common: 1
296 unknown: 1
291 unknown: 1
297 local changesets: 8
292 local changesets: 8
298 common: 5
293 common: 5
299 missing: 3
294 missing: 3
300 common heads: bebd167eb94d
295 common heads: bebd167eb94d
301
296
302 Both sides many new with stub:
297 Both sides many new with stub:
303
298
304 $ testdesc '-ra1 -ra2' '-rb' '
299 $ testdesc '-ra1 -ra2' '-rb' '
305 > +2:f +2:a1 +30 :b
300 > +2:f +2:a1 +30 :b
306 > <f +30 :a2'
301 > <f +30 :a2'
307
302
308 % -- a -> b tree
303 % -- a -> b tree
309 comparing with b
304 comparing with b
310 searching for changes
305 searching for changes
311 unpruned common: 2dc09a01254d
306 unpruned common: 2dc09a01254d
312 heads summary:
307 heads summary:
313 total common heads: 1
308 total common heads: 1
314 also local heads: 1
309 also local heads: 1
315 also remote heads: 0
310 also remote heads: 0
316 local heads: 2
311 local heads: 2
317 common: 1
312 common: 1
318 missing: 1
313 missing: 1
319 remote heads: 1
314 remote heads: 1
320 common: 0
315 common: 0
321 unknown: 1
316 unknown: 1
322 local changesets: 34
317 local changesets: 34
323 common: 4
318 common: 4
324 missing: 30
319 missing: 30
325 common heads: 2dc09a01254d
320 common heads: 2dc09a01254d
326
321
327 % -- a -> b set
322 % -- a -> b set
328 comparing with b
323 comparing with b
329 query 1; heads
324 query 1; heads
330 searching for changes
325 searching for changes
331 taking initial sample
326 taking initial sample
332 searching: 2 queries
327 searching: 2 queries
333 query 2; still undecided: 29, sample size is: 29
328 query 2; still undecided: 29, sample size is: 29
334 2 total queries in *.????s (glob)
329 2 total queries in *.????s (glob)
335 heads summary:
330 heads summary:
336 total common heads: 1
331 total common heads: 1
337 also local heads: 1
332 also local heads: 1
338 also remote heads: 0
333 also remote heads: 0
339 local heads: 2
334 local heads: 2
340 common: 1
335 common: 1
341 missing: 1
336 missing: 1
342 remote heads: 1
337 remote heads: 1
343 common: 0
338 common: 0
344 unknown: 1
339 unknown: 1
345 local changesets: 34
340 local changesets: 34
346 common: 4
341 common: 4
347 missing: 30
342 missing: 30
348 common heads: 2dc09a01254d
343 common heads: 2dc09a01254d
349
344
350 % -- a -> b set (tip only)
345 % -- a -> b set (tip only)
351 comparing with b
346 comparing with b
352 query 1; heads
347 query 1; heads
353 searching for changes
348 searching for changes
354 taking quick initial sample
349 taking quick initial sample
355 searching: 2 queries
350 searching: 2 queries
356 query 2; still undecided: 31, sample size is: 31
351 query 2; still undecided: 31, sample size is: 31
357 2 total queries in *.????s (glob)
352 2 total queries in *.????s (glob)
358 heads summary:
353 heads summary:
359 total common heads: 1
354 total common heads: 1
360 also local heads: 0
355 also local heads: 0
361 also remote heads: 0
356 also remote heads: 0
362 local heads: 2
357 local heads: 2
363 common: 0
358 common: 0
364 missing: 2
359 missing: 2
365 remote heads: 1
360 remote heads: 1
366 common: 0
361 common: 0
367 unknown: 1
362 unknown: 1
368 local changesets: 34
363 local changesets: 34
369 common: 2
364 common: 2
370 missing: 32
365 missing: 32
371 common heads: 66f7d451a68b
366 common heads: 66f7d451a68b
372
367
373 % -- b -> a tree
368 % -- b -> a tree
374 comparing with a
369 comparing with a
375 searching for changes
370 searching for changes
376 unpruned common: 2dc09a01254d 66f7d451a68b
371 unpruned common: 2dc09a01254d 66f7d451a68b
377 heads summary:
372 heads summary:
378 total common heads: 1
373 total common heads: 1
379 also local heads: 0
374 also local heads: 0
380 also remote heads: 1
375 also remote heads: 1
381 local heads: 1
376 local heads: 1
382 common: 0
377 common: 0
383 missing: 1
378 missing: 1
384 remote heads: 2
379 remote heads: 2
385 common: 1
380 common: 1
386 unknown: 1
381 unknown: 1
387 local changesets: 34
382 local changesets: 34
388 common: 4
383 common: 4
389 missing: 30
384 missing: 30
390 common heads: 2dc09a01254d
385 common heads: 2dc09a01254d
391
386
392 % -- b -> a set
387 % -- b -> a set
393 comparing with a
388 comparing with a
394 query 1; heads
389 query 1; heads
395 searching for changes
390 searching for changes
396 taking initial sample
391 taking initial sample
397 searching: 2 queries
392 searching: 2 queries
398 query 2; still undecided: 29, sample size is: 29
393 query 2; still undecided: 29, sample size is: 29
399 2 total queries in *.????s (glob)
394 2 total queries in *.????s (glob)
400 heads summary:
395 heads summary:
401 total common heads: 1
396 total common heads: 1
402 also local heads: 0
397 also local heads: 0
403 also remote heads: 1
398 also remote heads: 1
404 local heads: 1
399 local heads: 1
405 common: 0
400 common: 0
406 missing: 1
401 missing: 1
407 remote heads: 2
402 remote heads: 2
408 common: 1
403 common: 1
409 unknown: 1
404 unknown: 1
410 local changesets: 34
405 local changesets: 34
411 common: 4
406 common: 4
412 missing: 30
407 missing: 30
413 common heads: 2dc09a01254d
408 common heads: 2dc09a01254d
414
409
415 % -- b -> a set (tip only)
410 % -- b -> a set (tip only)
416 comparing with a
411 comparing with a
417 query 1; heads
412 query 1; heads
418 searching for changes
413 searching for changes
419 taking initial sample
414 taking initial sample
420 searching: 2 queries
415 searching: 2 queries
421 query 2; still undecided: 29, sample size is: 29
416 query 2; still undecided: 29, sample size is: 29
422 2 total queries in *.????s (glob)
417 2 total queries in *.????s (glob)
423 heads summary:
418 heads summary:
424 total common heads: 1
419 total common heads: 1
425 also local heads: 0
420 also local heads: 0
426 also remote heads: 1
421 also remote heads: 1
427 local heads: 1
422 local heads: 1
428 common: 0
423 common: 0
429 missing: 1
424 missing: 1
430 remote heads: 2
425 remote heads: 2
431 common: 1
426 common: 1
432 unknown: 1
427 unknown: 1
433 local changesets: 34
428 local changesets: 34
434 common: 4
429 common: 4
435 missing: 30
430 missing: 30
436 common heads: 2dc09a01254d
431 common heads: 2dc09a01254d
437
432
438
433
439 Both many new:
434 Both many new:
440
435
441 $ testdesc '-ra' '-rb' '
436 $ testdesc '-ra' '-rb' '
442 > +2:f +30 :b
437 > +2:f +30 :b
443 > <f +30 :a'
438 > <f +30 :a'
444
439
445 % -- a -> b tree
440 % -- a -> b tree
446 comparing with b
441 comparing with b
447 searching for changes
442 searching for changes
448 unpruned common: 66f7d451a68b
443 unpruned common: 66f7d451a68b
449 heads summary:
444 heads summary:
450 total common heads: 1
445 total common heads: 1
451 also local heads: 0
446 also local heads: 0
452 also remote heads: 0
447 also remote heads: 0
453 local heads: 1
448 local heads: 1
454 common: 0
449 common: 0
455 missing: 1
450 missing: 1
456 remote heads: 1
451 remote heads: 1
457 common: 0
452 common: 0
458 unknown: 1
453 unknown: 1
459 local changesets: 32
454 local changesets: 32
460 common: 2
455 common: 2
461 missing: 30
456 missing: 30
462 common heads: 66f7d451a68b
457 common heads: 66f7d451a68b
463
458
464 % -- a -> b set
459 % -- a -> b set
465 comparing with b
460 comparing with b
466 query 1; heads
461 query 1; heads
467 searching for changes
462 searching for changes
468 taking quick initial sample
463 taking quick initial sample
469 searching: 2 queries
464 searching: 2 queries
470 query 2; still undecided: 31, sample size is: 31
465 query 2; still undecided: 31, sample size is: 31
471 2 total queries in *.????s (glob)
466 2 total queries in *.????s (glob)
472 heads summary:
467 heads summary:
473 total common heads: 1
468 total common heads: 1
474 also local heads: 0
469 also local heads: 0
475 also remote heads: 0
470 also remote heads: 0
476 local heads: 1
471 local heads: 1
477 common: 0
472 common: 0
478 missing: 1
473 missing: 1
479 remote heads: 1
474 remote heads: 1
480 common: 0
475 common: 0
481 unknown: 1
476 unknown: 1
482 local changesets: 32
477 local changesets: 32
483 common: 2
478 common: 2
484 missing: 30
479 missing: 30
485 common heads: 66f7d451a68b
480 common heads: 66f7d451a68b
486
481
487 % -- a -> b set (tip only)
482 % -- a -> b set (tip only)
488 comparing with b
483 comparing with b
489 query 1; heads
484 query 1; heads
490 searching for changes
485 searching for changes
491 taking quick initial sample
486 taking quick initial sample
492 searching: 2 queries
487 searching: 2 queries
493 query 2; still undecided: 31, sample size is: 31
488 query 2; still undecided: 31, sample size is: 31
494 2 total queries in *.????s (glob)
489 2 total queries in *.????s (glob)
495 heads summary:
490 heads summary:
496 total common heads: 1
491 total common heads: 1
497 also local heads: 0
492 also local heads: 0
498 also remote heads: 0
493 also remote heads: 0
499 local heads: 1
494 local heads: 1
500 common: 0
495 common: 0
501 missing: 1
496 missing: 1
502 remote heads: 1
497 remote heads: 1
503 common: 0
498 common: 0
504 unknown: 1
499 unknown: 1
505 local changesets: 32
500 local changesets: 32
506 common: 2
501 common: 2
507 missing: 30
502 missing: 30
508 common heads: 66f7d451a68b
503 common heads: 66f7d451a68b
509
504
510 % -- b -> a tree
505 % -- b -> a tree
511 comparing with a
506 comparing with a
512 searching for changes
507 searching for changes
513 unpruned common: 66f7d451a68b
508 unpruned common: 66f7d451a68b
514 heads summary:
509 heads summary:
515 total common heads: 1
510 total common heads: 1
516 also local heads: 0
511 also local heads: 0
517 also remote heads: 0
512 also remote heads: 0
518 local heads: 1
513 local heads: 1
519 common: 0
514 common: 0
520 missing: 1
515 missing: 1
521 remote heads: 1
516 remote heads: 1
522 common: 0
517 common: 0
523 unknown: 1
518 unknown: 1
524 local changesets: 32
519 local changesets: 32
525 common: 2
520 common: 2
526 missing: 30
521 missing: 30
527 common heads: 66f7d451a68b
522 common heads: 66f7d451a68b
528
523
529 % -- b -> a set
524 % -- b -> a set
530 comparing with a
525 comparing with a
531 query 1; heads
526 query 1; heads
532 searching for changes
527 searching for changes
533 taking quick initial sample
528 taking quick initial sample
534 searching: 2 queries
529 searching: 2 queries
535 query 2; still undecided: 31, sample size is: 31
530 query 2; still undecided: 31, sample size is: 31
536 2 total queries in *.????s (glob)
531 2 total queries in *.????s (glob)
537 heads summary:
532 heads summary:
538 total common heads: 1
533 total common heads: 1
539 also local heads: 0
534 also local heads: 0
540 also remote heads: 0
535 also remote heads: 0
541 local heads: 1
536 local heads: 1
542 common: 0
537 common: 0
543 missing: 1
538 missing: 1
544 remote heads: 1
539 remote heads: 1
545 common: 0
540 common: 0
546 unknown: 1
541 unknown: 1
547 local changesets: 32
542 local changesets: 32
548 common: 2
543 common: 2
549 missing: 30
544 missing: 30
550 common heads: 66f7d451a68b
545 common heads: 66f7d451a68b
551
546
552 % -- b -> a set (tip only)
547 % -- b -> a set (tip only)
553 comparing with a
548 comparing with a
554 query 1; heads
549 query 1; heads
555 searching for changes
550 searching for changes
556 taking quick initial sample
551 taking quick initial sample
557 searching: 2 queries
552 searching: 2 queries
558 query 2; still undecided: 31, sample size is: 31
553 query 2; still undecided: 31, sample size is: 31
559 2 total queries in *.????s (glob)
554 2 total queries in *.????s (glob)
560 heads summary:
555 heads summary:
561 total common heads: 1
556 total common heads: 1
562 also local heads: 0
557 also local heads: 0
563 also remote heads: 0
558 also remote heads: 0
564 local heads: 1
559 local heads: 1
565 common: 0
560 common: 0
566 missing: 1
561 missing: 1
567 remote heads: 1
562 remote heads: 1
568 common: 0
563 common: 0
569 unknown: 1
564 unknown: 1
570 local changesets: 32
565 local changesets: 32
571 common: 2
566 common: 2
572 missing: 30
567 missing: 30
573 common heads: 66f7d451a68b
568 common heads: 66f7d451a68b
574
569
575
570
576 Both many new skewed:
571 Both many new skewed:
577
572
578 $ testdesc '-ra' '-rb' '
573 $ testdesc '-ra' '-rb' '
579 > +2:f +30 :b
574 > +2:f +30 :b
580 > <f +50 :a'
575 > <f +50 :a'
581
576
582 % -- a -> b tree
577 % -- a -> b tree
583 comparing with b
578 comparing with b
584 searching for changes
579 searching for changes
585 unpruned common: 66f7d451a68b
580 unpruned common: 66f7d451a68b
586 heads summary:
581 heads summary:
587 total common heads: 1
582 total common heads: 1
588 also local heads: 0
583 also local heads: 0
589 also remote heads: 0
584 also remote heads: 0
590 local heads: 1
585 local heads: 1
591 common: 0
586 common: 0
592 missing: 1
587 missing: 1
593 remote heads: 1
588 remote heads: 1
594 common: 0
589 common: 0
595 unknown: 1
590 unknown: 1
596 local changesets: 52
591 local changesets: 52
597 common: 2
592 common: 2
598 missing: 50
593 missing: 50
599 common heads: 66f7d451a68b
594 common heads: 66f7d451a68b
600
595
601 % -- a -> b set
596 % -- a -> b set
602 comparing with b
597 comparing with b
603 query 1; heads
598 query 1; heads
604 searching for changes
599 searching for changes
605 taking quick initial sample
600 taking quick initial sample
606 searching: 2 queries
601 searching: 2 queries
607 query 2; still undecided: 51, sample size is: 51
602 query 2; still undecided: 51, sample size is: 51
608 2 total queries in *.????s (glob)
603 2 total queries in *.????s (glob)
609 heads summary:
604 heads summary:
610 total common heads: 1
605 total common heads: 1
611 also local heads: 0
606 also local heads: 0
612 also remote heads: 0
607 also remote heads: 0
613 local heads: 1
608 local heads: 1
614 common: 0
609 common: 0
615 missing: 1
610 missing: 1
616 remote heads: 1
611 remote heads: 1
617 common: 0
612 common: 0
618 unknown: 1
613 unknown: 1
619 local changesets: 52
614 local changesets: 52
620 common: 2
615 common: 2
621 missing: 50
616 missing: 50
622 common heads: 66f7d451a68b
617 common heads: 66f7d451a68b
623
618
624 % -- a -> b set (tip only)
619 % -- a -> b set (tip only)
625 comparing with b
620 comparing with b
626 query 1; heads
621 query 1; heads
627 searching for changes
622 searching for changes
628 taking quick initial sample
623 taking quick initial sample
629 searching: 2 queries
624 searching: 2 queries
630 query 2; still undecided: 51, sample size is: 51
625 query 2; still undecided: 51, sample size is: 51
631 2 total queries in *.????s (glob)
626 2 total queries in *.????s (glob)
632 heads summary:
627 heads summary:
633 total common heads: 1
628 total common heads: 1
634 also local heads: 0
629 also local heads: 0
635 also remote heads: 0
630 also remote heads: 0
636 local heads: 1
631 local heads: 1
637 common: 0
632 common: 0
638 missing: 1
633 missing: 1
639 remote heads: 1
634 remote heads: 1
640 common: 0
635 common: 0
641 unknown: 1
636 unknown: 1
642 local changesets: 52
637 local changesets: 52
643 common: 2
638 common: 2
644 missing: 50
639 missing: 50
645 common heads: 66f7d451a68b
640 common heads: 66f7d451a68b
646
641
647 % -- b -> a tree
642 % -- b -> a tree
648 comparing with a
643 comparing with a
649 searching for changes
644 searching for changes
650 unpruned common: 66f7d451a68b
645 unpruned common: 66f7d451a68b
651 heads summary:
646 heads summary:
652 total common heads: 1
647 total common heads: 1
653 also local heads: 0
648 also local heads: 0
654 also remote heads: 0
649 also remote heads: 0
655 local heads: 1
650 local heads: 1
656 common: 0
651 common: 0
657 missing: 1
652 missing: 1
658 remote heads: 1
653 remote heads: 1
659 common: 0
654 common: 0
660 unknown: 1
655 unknown: 1
661 local changesets: 32
656 local changesets: 32
662 common: 2
657 common: 2
663 missing: 30
658 missing: 30
664 common heads: 66f7d451a68b
659 common heads: 66f7d451a68b
665
660
666 % -- b -> a set
661 % -- b -> a set
667 comparing with a
662 comparing with a
668 query 1; heads
663 query 1; heads
669 searching for changes
664 searching for changes
670 taking quick initial sample
665 taking quick initial sample
671 searching: 2 queries
666 searching: 2 queries
672 query 2; still undecided: 31, sample size is: 31
667 query 2; still undecided: 31, sample size is: 31
673 2 total queries in *.????s (glob)
668 2 total queries in *.????s (glob)
674 heads summary:
669 heads summary:
675 total common heads: 1
670 total common heads: 1
676 also local heads: 0
671 also local heads: 0
677 also remote heads: 0
672 also remote heads: 0
678 local heads: 1
673 local heads: 1
679 common: 0
674 common: 0
680 missing: 1
675 missing: 1
681 remote heads: 1
676 remote heads: 1
682 common: 0
677 common: 0
683 unknown: 1
678 unknown: 1
684 local changesets: 32
679 local changesets: 32
685 common: 2
680 common: 2
686 missing: 30
681 missing: 30
687 common heads: 66f7d451a68b
682 common heads: 66f7d451a68b
688
683
689 % -- b -> a set (tip only)
684 % -- b -> a set (tip only)
690 comparing with a
685 comparing with a
691 query 1; heads
686 query 1; heads
692 searching for changes
687 searching for changes
693 taking quick initial sample
688 taking quick initial sample
694 searching: 2 queries
689 searching: 2 queries
695 query 2; still undecided: 31, sample size is: 31
690 query 2; still undecided: 31, sample size is: 31
696 2 total queries in *.????s (glob)
691 2 total queries in *.????s (glob)
697 heads summary:
692 heads summary:
698 total common heads: 1
693 total common heads: 1
699 also local heads: 0
694 also local heads: 0
700 also remote heads: 0
695 also remote heads: 0
701 local heads: 1
696 local heads: 1
702 common: 0
697 common: 0
703 missing: 1
698 missing: 1
704 remote heads: 1
699 remote heads: 1
705 common: 0
700 common: 0
706 unknown: 1
701 unknown: 1
707 local changesets: 32
702 local changesets: 32
708 common: 2
703 common: 2
709 missing: 30
704 missing: 30
710 common heads: 66f7d451a68b
705 common heads: 66f7d451a68b
711
706
712
707
713 Both many new on top of long history:
708 Both many new on top of long history:
714
709
715 $ testdesc '-ra' '-rb' '
710 $ testdesc '-ra' '-rb' '
716 > +1000:f +30 :b
711 > +1000:f +30 :b
717 > <f +50 :a'
712 > <f +50 :a'
718
713
719 % -- a -> b tree
714 % -- a -> b tree
720 comparing with b
715 comparing with b
721 searching for changes
716 searching for changes
722 unpruned common: 7ead0cba2838
717 unpruned common: 7ead0cba2838
723 heads summary:
718 heads summary:
724 total common heads: 1
719 total common heads: 1
725 also local heads: 0
720 also local heads: 0
726 also remote heads: 0
721 also remote heads: 0
727 local heads: 1
722 local heads: 1
728 common: 0
723 common: 0
729 missing: 1
724 missing: 1
730 remote heads: 1
725 remote heads: 1
731 common: 0
726 common: 0
732 unknown: 1
727 unknown: 1
733 local changesets: 1050
728 local changesets: 1050
734 common: 1000
729 common: 1000
735 missing: 50
730 missing: 50
736 common heads: 7ead0cba2838
731 common heads: 7ead0cba2838
737
732
738 % -- a -> b set
733 % -- a -> b set
739 comparing with b
734 comparing with b
740 query 1; heads
735 query 1; heads
741 searching for changes
736 searching for changes
742 taking quick initial sample
737 taking quick initial sample
743 searching: 2 queries
738 searching: 2 queries
744 query 2; still undecided: 1049, sample size is: 11
739 query 2; still undecided: 1049, sample size is: 11
745 sampling from both directions
740 sampling from both directions
746 searching: 3 queries
741 searching: 3 queries
747 query 3; still undecided: 31, sample size is: 31
742 query 3; still undecided: 31, sample size is: 31
748 3 total queries in *.????s (glob)
743 3 total queries in *.????s (glob)
749 heads summary:
744 heads summary:
750 total common heads: 1
745 total common heads: 1
751 also local heads: 0
746 also local heads: 0
752 also remote heads: 0
747 also remote heads: 0
753 local heads: 1
748 local heads: 1
754 common: 0
749 common: 0
755 missing: 1
750 missing: 1
756 remote heads: 1
751 remote heads: 1
757 common: 0
752 common: 0
758 unknown: 1
753 unknown: 1
759 local changesets: 1050
754 local changesets: 1050
760 common: 1000
755 common: 1000
761 missing: 50
756 missing: 50
762 common heads: 7ead0cba2838
757 common heads: 7ead0cba2838
763
758
764 % -- a -> b set (tip only)
759 % -- a -> b set (tip only)
765 comparing with b
760 comparing with b
766 query 1; heads
761 query 1; heads
767 searching for changes
762 searching for changes
768 taking quick initial sample
763 taking quick initial sample
769 searching: 2 queries
764 searching: 2 queries
770 query 2; still undecided: 1049, sample size is: 11
765 query 2; still undecided: 1049, sample size is: 11
771 sampling from both directions
766 sampling from both directions
772 searching: 3 queries
767 searching: 3 queries
773 query 3; still undecided: 31, sample size is: 31
768 query 3; still undecided: 31, sample size is: 31
774 3 total queries in *.????s (glob)
769 3 total queries in *.????s (glob)
775 heads summary:
770 heads summary:
776 total common heads: 1
771 total common heads: 1
777 also local heads: 0
772 also local heads: 0
778 also remote heads: 0
773 also remote heads: 0
779 local heads: 1
774 local heads: 1
780 common: 0
775 common: 0
781 missing: 1
776 missing: 1
782 remote heads: 1
777 remote heads: 1
783 common: 0
778 common: 0
784 unknown: 1
779 unknown: 1
785 local changesets: 1050
780 local changesets: 1050
786 common: 1000
781 common: 1000
787 missing: 50
782 missing: 50
788 common heads: 7ead0cba2838
783 common heads: 7ead0cba2838
789
784
790 % -- b -> a tree
785 % -- b -> a tree
791 comparing with a
786 comparing with a
792 searching for changes
787 searching for changes
793 unpruned common: 7ead0cba2838
788 unpruned common: 7ead0cba2838
794 heads summary:
789 heads summary:
795 total common heads: 1
790 total common heads: 1
796 also local heads: 0
791 also local heads: 0
797 also remote heads: 0
792 also remote heads: 0
798 local heads: 1
793 local heads: 1
799 common: 0
794 common: 0
800 missing: 1
795 missing: 1
801 remote heads: 1
796 remote heads: 1
802 common: 0
797 common: 0
803 unknown: 1
798 unknown: 1
804 local changesets: 1030
799 local changesets: 1030
805 common: 1000
800 common: 1000
806 missing: 30
801 missing: 30
807 common heads: 7ead0cba2838
802 common heads: 7ead0cba2838
808
803
809 % -- b -> a set
804 % -- b -> a set
810 comparing with a
805 comparing with a
811 query 1; heads
806 query 1; heads
812 searching for changes
807 searching for changes
813 taking quick initial sample
808 taking quick initial sample
814 searching: 2 queries
809 searching: 2 queries
815 query 2; still undecided: 1029, sample size is: 11
810 query 2; still undecided: 1029, sample size is: 11
816 sampling from both directions
811 sampling from both directions
817 searching: 3 queries
812 searching: 3 queries
818 query 3; still undecided: 15, sample size is: 15
813 query 3; still undecided: 15, sample size is: 15
819 3 total queries in *.????s (glob)
814 3 total queries in *.????s (glob)
820 heads summary:
815 heads summary:
821 total common heads: 1
816 total common heads: 1
822 also local heads: 0
817 also local heads: 0
823 also remote heads: 0
818 also remote heads: 0
824 local heads: 1
819 local heads: 1
825 common: 0
820 common: 0
826 missing: 1
821 missing: 1
827 remote heads: 1
822 remote heads: 1
828 common: 0
823 common: 0
829 unknown: 1
824 unknown: 1
830 local changesets: 1030
825 local changesets: 1030
831 common: 1000
826 common: 1000
832 missing: 30
827 missing: 30
833 common heads: 7ead0cba2838
828 common heads: 7ead0cba2838
834
829
835 % -- b -> a set (tip only)
830 % -- b -> a set (tip only)
836 comparing with a
831 comparing with a
837 query 1; heads
832 query 1; heads
838 searching for changes
833 searching for changes
839 taking quick initial sample
834 taking quick initial sample
840 searching: 2 queries
835 searching: 2 queries
841 query 2; still undecided: 1029, sample size is: 11
836 query 2; still undecided: 1029, sample size is: 11
842 sampling from both directions
837 sampling from both directions
843 searching: 3 queries
838 searching: 3 queries
844 query 3; still undecided: 15, sample size is: 15
839 query 3; still undecided: 15, sample size is: 15
845 3 total queries in *.????s (glob)
840 3 total queries in *.????s (glob)
846 heads summary:
841 heads summary:
847 total common heads: 1
842 total common heads: 1
848 also local heads: 0
843 also local heads: 0
849 also remote heads: 0
844 also remote heads: 0
850 local heads: 1
845 local heads: 1
851 common: 0
846 common: 0
852 missing: 1
847 missing: 1
853 remote heads: 1
848 remote heads: 1
854 common: 0
849 common: 0
855 unknown: 1
850 unknown: 1
856 local changesets: 1030
851 local changesets: 1030
857 common: 1000
852 common: 1000
858 missing: 30
853 missing: 30
859 common heads: 7ead0cba2838
854 common heads: 7ead0cba2838
860
855
861
856
862 One with >200 heads, which used to use up all of the sample:
857 One with >200 heads, which used to use up all of the sample:
863
858
864 $ hg init manyheads
859 $ hg init manyheads
865 $ cd manyheads
860 $ cd manyheads
866 $ echo "+300:r @a" >dagdesc
861 $ echo "+300:r @a" >dagdesc
867 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
862 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
868 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
863 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
869 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
864 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
870 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
865 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
871 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
866 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
872 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
867 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
873 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
868 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
874 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
869 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
875 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
870 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
876 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
871 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
877 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
872 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
878 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
873 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
879 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
874 $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads
880 $ echo "@b *r+3" >>dagdesc # one more head
875 $ echo "@b *r+3" >>dagdesc # one more head
881 $ hg debugbuilddag <dagdesc
876 $ hg debugbuilddag <dagdesc
882 reading DAG from stdin
877 reading DAG from stdin
883
878
884 $ hg heads -t --template . | wc -c
879 $ hg heads -t --template . | wc -c
885 \s*261 (re)
880 \s*261 (re)
886
881
887 $ hg clone -b a . a
882 $ hg clone -b a . a
888 adding changesets
883 adding changesets
889 adding manifests
884 adding manifests
890 adding file changes
885 adding file changes
891 added 1340 changesets with 0 changes to 0 files (+259 heads)
886 added 1340 changesets with 0 changes to 0 files (+259 heads)
892 new changesets 1ea73414a91b:1c51e2c80832
887 new changesets 1ea73414a91b:1c51e2c80832
893 updating to branch a
888 updating to branch a
894 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
889 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
895 $ hg clone -b b . b
890 $ hg clone -b b . b
896 adding changesets
891 adding changesets
897 adding manifests
892 adding manifests
898 adding file changes
893 adding file changes
899 added 304 changesets with 0 changes to 0 files
894 added 304 changesets with 0 changes to 0 files
900 new changesets 1ea73414a91b:513314ca8b3a
895 new changesets 1ea73414a91b:513314ca8b3a
901 updating to branch b
896 updating to branch b
902 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
897 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
903
898
904 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
899 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true
905 comparing with b
900 comparing with b
906 query 1; heads
901 query 1; heads
907 searching for changes
902 searching for changes
908 taking quick initial sample
903 taking quick initial sample
909 searching: 2 queries
904 searching: 2 queries
910 query 2; still undecided: 1240, sample size is: 100
905 query 2; still undecided: 1240, sample size is: 100
911 sampling from both directions
906 sampling from both directions
912 searching: 3 queries
907 searching: 3 queries
913 query 3; still undecided: 1140, sample size is: 200
908 query 3; still undecided: 1140, sample size is: 200
914 sampling from both directions
909 sampling from both directions
915 searching: 4 queries
910 searching: 4 queries
916 query 4; still undecided: \d+, sample size is: 200 (re)
911 query 4; still undecided: \d+, sample size is: 200 (re)
917 sampling from both directions
912 sampling from both directions
918 searching: 5 queries
913 searching: 5 queries
919 query 5; still undecided: \d+, sample size is: 200 (re)
914 query 5; still undecided: \d+, sample size is: 200 (re)
920 sampling from both directions
915 sampling from both directions
921 searching: 6 queries
916 searching: 6 queries
922 query 6; still undecided: \d+, sample size is: \d+ (re)
917 query 6; still undecided: \d+, sample size is: \d+ (re)
923 6 total queries in *.????s (glob)
918 6 total queries in *.????s (glob)
924 heads summary:
919 heads summary:
925 total common heads: 1
920 total common heads: 1
926 also local heads: 0
921 also local heads: 0
927 also remote heads: 0
922 also remote heads: 0
928 local heads: 260
923 local heads: 260
929 common: 0
924 common: 0
930 missing: 260
925 missing: 260
931 remote heads: 1
926 remote heads: 1
932 common: 0
927 common: 0
933 unknown: 1
928 unknown: 1
934 local changesets: 1340
929 local changesets: 1340
935 common: 300
930 common: 300
936 missing: 1040
931 missing: 1040
937 common heads: 3ee37d65064a
932 common heads: 3ee37d65064a
938 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
933 $ hg -R a debugdiscovery b --debug --verbose --config progress.debug=true --rev tip
939 comparing with b
934 comparing with b
940 query 1; heads
935 query 1; heads
941 searching for changes
936 searching for changes
942 taking quick initial sample
937 taking quick initial sample
943 searching: 2 queries
938 searching: 2 queries
944 query 2; still undecided: 303, sample size is: 9
939 query 2; still undecided: 303, sample size is: 9
945 sampling from both directions
940 sampling from both directions
946 searching: 3 queries
941 searching: 3 queries
947 query 3; still undecided: 3, sample size is: 3
942 query 3; still undecided: 3, sample size is: 3
948 3 total queries in *.????s (glob)
943 3 total queries in *.????s (glob)
949 heads summary:
944 heads summary:
950 total common heads: 1
945 total common heads: 1
951 also local heads: 0
946 also local heads: 0
952 also remote heads: 0
947 also remote heads: 0
953 local heads: 260
948 local heads: 260
954 common: 0
949 common: 0
955 missing: 260
950 missing: 260
956 remote heads: 1
951 remote heads: 1
957 common: 0
952 common: 0
958 unknown: 1
953 unknown: 1
959 local changesets: 1340
954 local changesets: 1340
960 common: 300
955 common: 300
961 missing: 1040
956 missing: 1040
962 common heads: 3ee37d65064a
957 common heads: 3ee37d65064a
963
958
964 Test actual protocol when pulling one new head in addition to common heads
959 Test actual protocol when pulling one new head in addition to common heads
965
960
966 $ hg clone -U b c
961 $ hg clone -U b c
967 $ hg -R c id -ir tip
962 $ hg -R c id -ir tip
968 513314ca8b3a
963 513314ca8b3a
969 $ hg -R c up -qr default
964 $ hg -R c up -qr default
970 $ touch c/f
965 $ touch c/f
971 $ hg -R c ci -Aqm "extra head"
966 $ hg -R c ci -Aqm "extra head"
972 $ hg -R c id -i
967 $ hg -R c id -i
973 e64a39e7da8b
968 e64a39e7da8b
974
969
975 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
970 $ hg serve -R c -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log
976 $ cat hg.pid >> $DAEMON_PIDS
971 $ cat hg.pid >> $DAEMON_PIDS
977
972
978 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
973 $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n'
979 comparing with http://localhost:$HGPORT/
974 comparing with http://localhost:$HGPORT/
980 searching for changes
975 searching for changes
981 e64a39e7da8b
976 e64a39e7da8b
982
977
983 $ killdaemons.py
978 $ killdaemons.py
984 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
979 $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling
985 "GET /?cmd=capabilities HTTP/1.1" 200 -
980 "GET /?cmd=capabilities HTTP/1.1" 200 -
986 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
981 "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
987 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
982 "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:$USUAL_BUNDLE_CAPS$&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
988 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
983 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ partial-pull
989 $ cat errors.log
984 $ cat errors.log
990
985
991 $ cd ..
986 $ cd ..
992
987
993
988
994 Issue 4438 - test coverage for 3ef893520a85 issues.
989 Issue 4438 - test coverage for 3ef893520a85 issues.
995
990
996 $ mkdir issue4438
991 $ mkdir issue4438
997 $ cd issue4438
992 $ cd issue4438
998 #if false
993 #if false
999 generate new bundles:
994 generate new bundles:
1000 $ hg init r1
995 $ hg init r1
1001 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
996 $ for i in `"$PYTHON" $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done
1002 $ hg clone -q r1 r2
997 $ hg clone -q r1 r2
1003 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
998 $ for i in `"$PYTHON" $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done
1004 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
999 $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change
1005 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1000 $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg
1006 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1001 $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg
1007 #else
1002 #else
1008 use existing bundles:
1003 use existing bundles:
1009 $ hg init r1
1004 $ hg init r1
1010 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1005 $ hg -R r1 -q unbundle $TESTDIR/bundles/issue4438-r1.hg
1011 $ hg -R r1 -q up
1006 $ hg -R r1 -q up
1012 $ hg init r2
1007 $ hg init r2
1013 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1008 $ hg -R r2 -q unbundle $TESTDIR/bundles/issue4438-r2.hg
1014 $ hg -R r2 -q up
1009 $ hg -R r2 -q up
1015 #endif
1010 #endif
1016
1011
1017 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1012 Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650:
1018
1013
1019 $ hg -R r1 outgoing r2 -T'{rev} '
1014 $ hg -R r1 outgoing r2 -T'{rev} '
1020 comparing with r2
1015 comparing with r2
1021 searching for changes
1016 searching for changes
1022 101 102 103 104 105 106 107 108 109 110 (no-eol)
1017 101 102 103 104 105 106 107 108 109 110 (no-eol)
1023
1018
1024 The case where all the 'initialsamplesize' samples already were common would
1019 The case where all the 'initialsamplesize' samples already were common would
1025 give 'all remote heads known locally' without checking the remaining heads -
1020 give 'all remote heads known locally' without checking the remaining heads -
1026 fixed in 86c35b7ae300:
1021 fixed in 86c35b7ae300:
1027
1022
1028 $ cat >> $TESTTMP/unrandomsample.py << EOF
1023 $ cat >> $TESTTMP/unrandomsample.py << EOF
1029 > import random
1024 > import random
1030 > def sample(population, k):
1025 > def sample(population, k):
1031 > return sorted(population)[:k]
1026 > return sorted(population)[:k]
1032 > random.sample = sample
1027 > random.sample = sample
1033 > EOF
1028 > EOF
1034
1029
1035 $ cat >> r1/.hg/hgrc << EOF
1030 $ cat >> r1/.hg/hgrc << EOF
1036 > [extensions]
1031 > [extensions]
1037 > unrandomsample = $TESTTMP/unrandomsample.py
1032 > unrandomsample = $TESTTMP/unrandomsample.py
1038 > EOF
1033 > EOF
1039
1034
1040 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1035 $ hg -R r1 outgoing r2 -T'{rev} ' --config extensions.blackbox= \
1041 > --config blackbox.track='command commandfinish discovery'
1036 > --config blackbox.track='command commandfinish discovery'
1042 comparing with r2
1037 comparing with r2
1043 searching for changes
1038 searching for changes
1044 101 102 103 104 105 106 107 108 109 110 (no-eol)
1039 101 102 103 104 105 106 107 108 109 110 (no-eol)
1045 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1040 $ hg -R r1 --config extensions.blackbox= blackbox --config blackbox.track=
1046 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1041 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> serve --cmdserver chgunix * (glob) (chg !)
1047 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1042 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* (glob)
1048 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1043 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> found 101 common and 1 unknown server heads, 2 roundtrips in *.????s (glob)
1049 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1044 * @5d0b986a083e0d91f116de4691e2aaa54d5bbec0 (*)> -R r1 outgoing r2 *-T{rev} * --config *extensions.blackbox=* exited 0 after *.?? seconds (glob)
1050 $ cd ..
1045 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now