fileset: sort debugfileset output...
Yuya Nishihara
r38618:f9805627 default
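The change in this revision wraps the fileset results in sorted() so that 'hg debugfileset' prints matching file names in a deterministic order. A minimal sketch of the effect, using a hypothetical stand-in set of file names rather than the real fileset API:

    # stand-in for the unordered result of ctx.getfileset(expr) (hypothetical data)
    files = {'b.txt', 'a.txt', 'sub/c.txt'}

    # without sorting, set iteration order is arbitrary
    for f in files:
        print(f)

    # with sorted(), as in this changeset, the output order is stable and test-friendly
    for f in sorted(files):
        print(f)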
@@ -1,3144 +1,3144 @@
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 fileset,
51 fileset,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
92 release = lockmod.release
92 release = lockmod.release
93
93
94 command = registrar.command()
94 command = registrar.command()
95
95
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
96 @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
97 def debugancestor(ui, repo, *args):
97 def debugancestor(ui, repo, *args):
98 """find the ancestor revision of two revisions in a given index"""
98 """find the ancestor revision of two revisions in a given index"""
99 if len(args) == 3:
99 if len(args) == 3:
100 index, rev1, rev2 = args
100 index, rev1, rev2 = args
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
101 r = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
102 lookup = r.lookup
102 lookup = r.lookup
103 elif len(args) == 2:
103 elif len(args) == 2:
104 if not repo:
104 if not repo:
105 raise error.Abort(_('there is no Mercurial repository here '
105 raise error.Abort(_('there is no Mercurial repository here '
106 '(.hg not found)'))
106 '(.hg not found)'))
107 rev1, rev2 = args
107 rev1, rev2 = args
108 r = repo.changelog
108 r = repo.changelog
109 lookup = repo.lookup
109 lookup = repo.lookup
110 else:
110 else:
111 raise error.Abort(_('either two or three arguments required'))
111 raise error.Abort(_('either two or three arguments required'))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
112 a = r.ancestor(lookup(rev1), lookup(rev2))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
113 ui.write('%d:%s\n' % (r.rev(a), hex(a)))
114
114
115 @command('debugapplystreamclonebundle', [], 'FILE')
115 @command('debugapplystreamclonebundle', [], 'FILE')
116 def debugapplystreamclonebundle(ui, repo, fname):
116 def debugapplystreamclonebundle(ui, repo, fname):
117 """apply a stream clone bundle file"""
117 """apply a stream clone bundle file"""
118 f = hg.openpath(ui, fname)
118 f = hg.openpath(ui, fname)
119 gen = exchange.readbundle(ui, f, fname)
119 gen = exchange.readbundle(ui, f, fname)
120 gen.apply(repo)
120 gen.apply(repo)
121
121
122 @command('debugbuilddag',
122 @command('debugbuilddag',
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
123 [('m', 'mergeable-file', None, _('add single file mergeable changes')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
124 ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
125 ('n', 'new-file', None, _('add new file at each rev'))],
125 ('n', 'new-file', None, _('add new file at each rev'))],
126 _('[OPTION]... [TEXT]'))
126 _('[OPTION]... [TEXT]'))
127 def debugbuilddag(ui, repo, text=None,
127 def debugbuilddag(ui, repo, text=None,
128 mergeable_file=False,
128 mergeable_file=False,
129 overwritten_file=False,
129 overwritten_file=False,
130 new_file=False):
130 new_file=False):
131 """builds a repo with a given DAG from scratch in the current empty repo
131 """builds a repo with a given DAG from scratch in the current empty repo
132
132
133 The description of the DAG is read from stdin if not given on the
133 The description of the DAG is read from stdin if not given on the
134 command line.
134 command line.
135
135
136 Elements:
136 Elements:
137
137
138 - "+n" is a linear run of n nodes based on the current default parent
138 - "+n" is a linear run of n nodes based on the current default parent
139 - "." is a single node based on the current default parent
139 - "." is a single node based on the current default parent
140 - "$" resets the default parent to null (implied at the start);
140 - "$" resets the default parent to null (implied at the start);
141 otherwise the default parent is always the last node created
141 otherwise the default parent is always the last node created
142 - "<p" sets the default parent to the backref p
142 - "<p" sets the default parent to the backref p
143 - "*p" is a fork at parent p, which is a backref
143 - "*p" is a fork at parent p, which is a backref
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
144 - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
145 - "/p2" is a merge of the preceding node and p2
145 - "/p2" is a merge of the preceding node and p2
146 - ":tag" defines a local tag for the preceding node
146 - ":tag" defines a local tag for the preceding node
147 - "@branch" sets the named branch for subsequent nodes
147 - "@branch" sets the named branch for subsequent nodes
148 - "#...\\n" is a comment up to the end of the line
148 - "#...\\n" is a comment up to the end of the line
149
149
150 Whitespace between the above elements is ignored.
150 Whitespace between the above elements is ignored.
151
151
152 A backref is either
152 A backref is either
153
153
154 - a number n, which references the node curr-n, where curr is the current
154 - a number n, which references the node curr-n, where curr is the current
155 node, or
155 node, or
156 - the name of a local tag you placed earlier using ":tag", or
156 - the name of a local tag you placed earlier using ":tag", or
157 - empty to denote the default parent.
157 - empty to denote the default parent.
158
158
159 All string valued-elements are either strictly alphanumeric, or must
159 All string valued-elements are either strictly alphanumeric, or must
160 be enclosed in double quotes ("..."), with "\\" as escape character.
160 be enclosed in double quotes ("..."), with "\\" as escape character.
161 """
161 """
162
162
163 if text is None:
163 if text is None:
164 ui.status(_("reading DAG from stdin\n"))
164 ui.status(_("reading DAG from stdin\n"))
165 text = ui.fin.read()
165 text = ui.fin.read()
166
166
167 cl = repo.changelog
167 cl = repo.changelog
168 if len(cl) > 0:
168 if len(cl) > 0:
169 raise error.Abort(_('repository is not empty'))
169 raise error.Abort(_('repository is not empty'))
170
170
171 # determine number of revs in DAG
171 # determine number of revs in DAG
172 total = 0
172 total = 0
173 for type, data in dagparser.parsedag(text):
173 for type, data in dagparser.parsedag(text):
174 if type == 'n':
174 if type == 'n':
175 total += 1
175 total += 1
176
176
177 if mergeable_file:
177 if mergeable_file:
178 linesperrev = 2
178 linesperrev = 2
179 # make a file with k lines per rev
179 # make a file with k lines per rev
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
180 initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
181 initialmergedlines.append("")
181 initialmergedlines.append("")
182
182
183 tags = []
183 tags = []
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
184 progress = ui.makeprogress(_('building'), unit=_('revisions'),
185 total=total)
185 total=total)
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
186 with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
187 at = -1
187 at = -1
188 atbranch = 'default'
188 atbranch = 'default'
189 nodeids = []
189 nodeids = []
190 id = 0
190 id = 0
191 progress.update(id)
191 progress.update(id)
192 for type, data in dagparser.parsedag(text):
192 for type, data in dagparser.parsedag(text):
193 if type == 'n':
193 if type == 'n':
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
194 ui.note(('node %s\n' % pycompat.bytestr(data)))
195 id, ps = data
195 id, ps = data
196
196
197 files = []
197 files = []
198 filecontent = {}
198 filecontent = {}
199
199
200 p2 = None
200 p2 = None
201 if mergeable_file:
201 if mergeable_file:
202 fn = "mf"
202 fn = "mf"
203 p1 = repo[ps[0]]
203 p1 = repo[ps[0]]
204 if len(ps) > 1:
204 if len(ps) > 1:
205 p2 = repo[ps[1]]
205 p2 = repo[ps[1]]
206 pa = p1.ancestor(p2)
206 pa = p1.ancestor(p2)
207 base, local, other = [x[fn].data() for x in (pa, p1,
207 base, local, other = [x[fn].data() for x in (pa, p1,
208 p2)]
208 p2)]
209 m3 = simplemerge.Merge3Text(base, local, other)
209 m3 = simplemerge.Merge3Text(base, local, other)
210 ml = [l.strip() for l in m3.merge_lines()]
210 ml = [l.strip() for l in m3.merge_lines()]
211 ml.append("")
211 ml.append("")
212 elif at > 0:
212 elif at > 0:
213 ml = p1[fn].data().split("\n")
213 ml = p1[fn].data().split("\n")
214 else:
214 else:
215 ml = initialmergedlines
215 ml = initialmergedlines
216 ml[id * linesperrev] += " r%i" % id
216 ml[id * linesperrev] += " r%i" % id
217 mergedtext = "\n".join(ml)
217 mergedtext = "\n".join(ml)
218 files.append(fn)
218 files.append(fn)
219 filecontent[fn] = mergedtext
219 filecontent[fn] = mergedtext
220
220
221 if overwritten_file:
221 if overwritten_file:
222 fn = "of"
222 fn = "of"
223 files.append(fn)
223 files.append(fn)
224 filecontent[fn] = "r%i\n" % id
224 filecontent[fn] = "r%i\n" % id
225
225
226 if new_file:
226 if new_file:
227 fn = "nf%i" % id
227 fn = "nf%i" % id
228 files.append(fn)
228 files.append(fn)
229 filecontent[fn] = "r%i\n" % id
229 filecontent[fn] = "r%i\n" % id
230 if len(ps) > 1:
230 if len(ps) > 1:
231 if not p2:
231 if not p2:
232 p2 = repo[ps[1]]
232 p2 = repo[ps[1]]
233 for fn in p2:
233 for fn in p2:
234 if fn.startswith("nf"):
234 if fn.startswith("nf"):
235 files.append(fn)
235 files.append(fn)
236 filecontent[fn] = p2[fn].data()
236 filecontent[fn] = p2[fn].data()
237
237
238 def fctxfn(repo, cx, path):
238 def fctxfn(repo, cx, path):
239 if path in filecontent:
239 if path in filecontent:
240 return context.memfilectx(repo, cx, path,
240 return context.memfilectx(repo, cx, path,
241 filecontent[path])
241 filecontent[path])
242 return None
242 return None
243
243
244 if len(ps) == 0 or ps[0] < 0:
244 if len(ps) == 0 or ps[0] < 0:
245 pars = [None, None]
245 pars = [None, None]
246 elif len(ps) == 1:
246 elif len(ps) == 1:
247 pars = [nodeids[ps[0]], None]
247 pars = [nodeids[ps[0]], None]
248 else:
248 else:
249 pars = [nodeids[p] for p in ps]
249 pars = [nodeids[p] for p in ps]
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
250 cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
251 date=(id, 0),
251 date=(id, 0),
252 user="debugbuilddag",
252 user="debugbuilddag",
253 extra={'branch': atbranch})
253 extra={'branch': atbranch})
254 nodeid = repo.commitctx(cx)
254 nodeid = repo.commitctx(cx)
255 nodeids.append(nodeid)
255 nodeids.append(nodeid)
256 at = id
256 at = id
257 elif type == 'l':
257 elif type == 'l':
258 id, name = data
258 id, name = data
259 ui.note(('tag %s\n' % name))
259 ui.note(('tag %s\n' % name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
260 tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
261 elif type == 'a':
261 elif type == 'a':
262 ui.note(('branch %s\n' % data))
262 ui.note(('branch %s\n' % data))
263 atbranch = data
263 atbranch = data
264 progress.update(id)
264 progress.update(id)
265
265
266 if tags:
266 if tags:
267 repo.vfs.write("localtags", "".join(tags))
267 repo.vfs.write("localtags", "".join(tags))
268
268
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
269 def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
270 indent_string = ' ' * indent
270 indent_string = ' ' * indent
271 if all:
271 if all:
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
272 ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
273 % indent_string)
273 % indent_string)
274
274
275 def showchunks(named):
275 def showchunks(named):
276 ui.write("\n%s%s\n" % (indent_string, named))
276 ui.write("\n%s%s\n" % (indent_string, named))
277 for deltadata in gen.deltaiter():
277 for deltadata in gen.deltaiter():
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
278 node, p1, p2, cs, deltabase, delta, flags = deltadata
279 ui.write("%s%s %s %s %s %s %d\n" %
279 ui.write("%s%s %s %s %s %s %d\n" %
280 (indent_string, hex(node), hex(p1), hex(p2),
280 (indent_string, hex(node), hex(p1), hex(p2),
281 hex(cs), hex(deltabase), len(delta)))
281 hex(cs), hex(deltabase), len(delta)))
282
282
283 chunkdata = gen.changelogheader()
283 chunkdata = gen.changelogheader()
284 showchunks("changelog")
284 showchunks("changelog")
285 chunkdata = gen.manifestheader()
285 chunkdata = gen.manifestheader()
286 showchunks("manifest")
286 showchunks("manifest")
287 for chunkdata in iter(gen.filelogheader, {}):
287 for chunkdata in iter(gen.filelogheader, {}):
288 fname = chunkdata['filename']
288 fname = chunkdata['filename']
289 showchunks(fname)
289 showchunks(fname)
290 else:
290 else:
291 if isinstance(gen, bundle2.unbundle20):
291 if isinstance(gen, bundle2.unbundle20):
292 raise error.Abort(_('use debugbundle2 for this file'))
292 raise error.Abort(_('use debugbundle2 for this file'))
293 chunkdata = gen.changelogheader()
293 chunkdata = gen.changelogheader()
294 for deltadata in gen.deltaiter():
294 for deltadata in gen.deltaiter():
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
295 node, p1, p2, cs, deltabase, delta, flags = deltadata
296 ui.write("%s%s\n" % (indent_string, hex(node)))
296 ui.write("%s%s\n" % (indent_string, hex(node)))
297
297
298 def _debugobsmarkers(ui, part, indent=0, **opts):
298 def _debugobsmarkers(ui, part, indent=0, **opts):
299 """display version and markers contained in 'data'"""
299 """display version and markers contained in 'data'"""
300 opts = pycompat.byteskwargs(opts)
300 opts = pycompat.byteskwargs(opts)
301 data = part.read()
301 data = part.read()
302 indent_string = ' ' * indent
302 indent_string = ' ' * indent
303 try:
303 try:
304 version, markers = obsolete._readmarkers(data)
304 version, markers = obsolete._readmarkers(data)
305 except error.UnknownVersion as exc:
305 except error.UnknownVersion as exc:
306 msg = "%sunsupported version: %s (%d bytes)\n"
306 msg = "%sunsupported version: %s (%d bytes)\n"
307 msg %= indent_string, exc.version, len(data)
307 msg %= indent_string, exc.version, len(data)
308 ui.write(msg)
308 ui.write(msg)
309 else:
309 else:
310 msg = "%sversion: %d (%d bytes)\n"
310 msg = "%sversion: %d (%d bytes)\n"
311 msg %= indent_string, version, len(data)
311 msg %= indent_string, version, len(data)
312 ui.write(msg)
312 ui.write(msg)
313 fm = ui.formatter('debugobsolete', opts)
313 fm = ui.formatter('debugobsolete', opts)
314 for rawmarker in sorted(markers):
314 for rawmarker in sorted(markers):
315 m = obsutil.marker(None, rawmarker)
315 m = obsutil.marker(None, rawmarker)
316 fm.startitem()
316 fm.startitem()
317 fm.plain(indent_string)
317 fm.plain(indent_string)
318 cmdutil.showmarker(fm, m)
318 cmdutil.showmarker(fm, m)
319 fm.end()
319 fm.end()
320
320
321 def _debugphaseheads(ui, data, indent=0):
321 def _debugphaseheads(ui, data, indent=0):
322 """display version and markers contained in 'data'"""
322 """display version and markers contained in 'data'"""
323 indent_string = ' ' * indent
323 indent_string = ' ' * indent
324 headsbyphase = phases.binarydecode(data)
324 headsbyphase = phases.binarydecode(data)
325 for phase in phases.allphases:
325 for phase in phases.allphases:
326 for head in headsbyphase[phase]:
326 for head in headsbyphase[phase]:
327 ui.write(indent_string)
327 ui.write(indent_string)
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
328 ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
329
329
330 def _quasirepr(thing):
330 def _quasirepr(thing):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
331 if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
332 return '{%s}' % (
332 return '{%s}' % (
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
333 b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
334 return pycompat.bytestr(repr(thing))
334 return pycompat.bytestr(repr(thing))
335
335
336 def _debugbundle2(ui, gen, all=None, **opts):
336 def _debugbundle2(ui, gen, all=None, **opts):
337 """lists the contents of a bundle2"""
337 """lists the contents of a bundle2"""
338 if not isinstance(gen, bundle2.unbundle20):
338 if not isinstance(gen, bundle2.unbundle20):
339 raise error.Abort(_('not a bundle2 file'))
339 raise error.Abort(_('not a bundle2 file'))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
340 ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
341 parttypes = opts.get(r'part_type', [])
341 parttypes = opts.get(r'part_type', [])
342 for part in gen.iterparts():
342 for part in gen.iterparts():
343 if parttypes and part.type not in parttypes:
343 if parttypes and part.type not in parttypes:
344 continue
344 continue
345 msg = '%s -- %s (mandatory: %r)\n'
345 msg = '%s -- %s (mandatory: %r)\n'
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
346 ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
347 if part.type == 'changegroup':
347 if part.type == 'changegroup':
348 version = part.params.get('version', '01')
348 version = part.params.get('version', '01')
349 cg = changegroup.getunbundler(version, part, 'UN')
349 cg = changegroup.getunbundler(version, part, 'UN')
350 if not ui.quiet:
350 if not ui.quiet:
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
351 _debugchangegroup(ui, cg, all=all, indent=4, **opts)
352 if part.type == 'obsmarkers':
352 if part.type == 'obsmarkers':
353 if not ui.quiet:
353 if not ui.quiet:
354 _debugobsmarkers(ui, part, indent=4, **opts)
354 _debugobsmarkers(ui, part, indent=4, **opts)
355 if part.type == 'phase-heads':
355 if part.type == 'phase-heads':
356 if not ui.quiet:
356 if not ui.quiet:
357 _debugphaseheads(ui, part, indent=4)
357 _debugphaseheads(ui, part, indent=4)
358
358
359 @command('debugbundle',
359 @command('debugbundle',
360 [('a', 'all', None, _('show all details')),
360 [('a', 'all', None, _('show all details')),
361 ('', 'part-type', [], _('show only the named part type')),
361 ('', 'part-type', [], _('show only the named part type')),
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
362 ('', 'spec', None, _('print the bundlespec of the bundle'))],
363 _('FILE'),
363 _('FILE'),
364 norepo=True)
364 norepo=True)
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
365 def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
366 """lists the contents of a bundle"""
366 """lists the contents of a bundle"""
367 with hg.openpath(ui, bundlepath) as f:
367 with hg.openpath(ui, bundlepath) as f:
368 if spec:
368 if spec:
369 spec = exchange.getbundlespec(ui, f)
369 spec = exchange.getbundlespec(ui, f)
370 ui.write('%s\n' % spec)
370 ui.write('%s\n' % spec)
371 return
371 return
372
372
373 gen = exchange.readbundle(ui, f, bundlepath)
373 gen = exchange.readbundle(ui, f, bundlepath)
374 if isinstance(gen, bundle2.unbundle20):
374 if isinstance(gen, bundle2.unbundle20):
375 return _debugbundle2(ui, gen, all=all, **opts)
375 return _debugbundle2(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
376 _debugchangegroup(ui, gen, all=all, **opts)
377
377
378 @command('debugcapabilities',
378 @command('debugcapabilities',
379 [], _('PATH'),
379 [], _('PATH'),
380 norepo=True)
380 norepo=True)
381 def debugcapabilities(ui, path, **opts):
381 def debugcapabilities(ui, path, **opts):
382 """lists the capabilities of a remote peer"""
382 """lists the capabilities of a remote peer"""
383 opts = pycompat.byteskwargs(opts)
383 opts = pycompat.byteskwargs(opts)
384 peer = hg.peer(ui, opts, path)
384 peer = hg.peer(ui, opts, path)
385 caps = peer.capabilities()
385 caps = peer.capabilities()
386 ui.write(('Main capabilities:\n'))
386 ui.write(('Main capabilities:\n'))
387 for c in sorted(caps):
387 for c in sorted(caps):
388 ui.write((' %s\n') % c)
388 ui.write((' %s\n') % c)
389 b2caps = bundle2.bundle2caps(peer)
389 b2caps = bundle2.bundle2caps(peer)
390 if b2caps:
390 if b2caps:
391 ui.write(('Bundle2 capabilities:\n'))
391 ui.write(('Bundle2 capabilities:\n'))
392 for key, values in sorted(b2caps.iteritems()):
392 for key, values in sorted(b2caps.iteritems()):
393 ui.write((' %s\n') % key)
393 ui.write((' %s\n') % key)
394 for v in values:
394 for v in values:
395 ui.write((' %s\n') % v)
395 ui.write((' %s\n') % v)
396
396
397 @command('debugcheckstate', [], '')
397 @command('debugcheckstate', [], '')
398 def debugcheckstate(ui, repo):
398 def debugcheckstate(ui, repo):
399 """validate the correctness of the current dirstate"""
399 """validate the correctness of the current dirstate"""
400 parent1, parent2 = repo.dirstate.parents()
400 parent1, parent2 = repo.dirstate.parents()
401 m1 = repo[parent1].manifest()
401 m1 = repo[parent1].manifest()
402 m2 = repo[parent2].manifest()
402 m2 = repo[parent2].manifest()
403 errors = 0
403 errors = 0
404 for f in repo.dirstate:
404 for f in repo.dirstate:
405 state = repo.dirstate[f]
405 state = repo.dirstate[f]
406 if state in "nr" and f not in m1:
406 if state in "nr" and f not in m1:
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
407 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
408 errors += 1
408 errors += 1
409 if state in "a" and f in m1:
409 if state in "a" and f in m1:
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
410 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
411 errors += 1
411 errors += 1
412 if state in "m" and f not in m1 and f not in m2:
412 if state in "m" and f not in m1 and f not in m2:
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
413 ui.warn(_("%s in state %s, but not in either manifest\n") %
414 (f, state))
414 (f, state))
415 errors += 1
415 errors += 1
416 for f in m1:
416 for f in m1:
417 state = repo.dirstate[f]
417 state = repo.dirstate[f]
418 if state not in "nrm":
418 if state not in "nrm":
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
419 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
420 errors += 1
420 errors += 1
421 if errors:
421 if errors:
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
422 error = _(".hg/dirstate inconsistent with current parent's manifest")
423 raise error.Abort(error)
423 raise error.Abort(error)
424
424
425 @command('debugcolor',
425 @command('debugcolor',
426 [('', 'style', None, _('show all configured styles'))],
426 [('', 'style', None, _('show all configured styles'))],
427 'hg debugcolor')
427 'hg debugcolor')
428 def debugcolor(ui, repo, **opts):
428 def debugcolor(ui, repo, **opts):
429 """show available color, effects or style"""
429 """show available color, effects or style"""
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
430 ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
431 if opts.get(r'style'):
431 if opts.get(r'style'):
432 return _debugdisplaystyle(ui)
432 return _debugdisplaystyle(ui)
433 else:
433 else:
434 return _debugdisplaycolor(ui)
434 return _debugdisplaycolor(ui)
435
435
436 def _debugdisplaycolor(ui):
436 def _debugdisplaycolor(ui):
437 ui = ui.copy()
437 ui = ui.copy()
438 ui._styles.clear()
438 ui._styles.clear()
439 for effect in color._activeeffects(ui).keys():
439 for effect in color._activeeffects(ui).keys():
440 ui._styles[effect] = effect
440 ui._styles[effect] = effect
441 if ui._terminfoparams:
441 if ui._terminfoparams:
442 for k, v in ui.configitems('color'):
442 for k, v in ui.configitems('color'):
443 if k.startswith('color.'):
443 if k.startswith('color.'):
444 ui._styles[k] = k[6:]
444 ui._styles[k] = k[6:]
445 elif k.startswith('terminfo.'):
445 elif k.startswith('terminfo.'):
446 ui._styles[k] = k[9:]
446 ui._styles[k] = k[9:]
447 ui.write(_('available colors:\n'))
447 ui.write(_('available colors:\n'))
448 # sort label with a '_' after the other to group '_background' entry.
448 # sort label with a '_' after the other to group '_background' entry.
449 items = sorted(ui._styles.items(),
449 items = sorted(ui._styles.items(),
450 key=lambda i: ('_' in i[0], i[0], i[1]))
450 key=lambda i: ('_' in i[0], i[0], i[1]))
451 for colorname, label in items:
451 for colorname, label in items:
452 ui.write(('%s\n') % colorname, label=label)
452 ui.write(('%s\n') % colorname, label=label)
453
453
454 def _debugdisplaystyle(ui):
454 def _debugdisplaystyle(ui):
455 ui.write(_('available style:\n'))
455 ui.write(_('available style:\n'))
456 if not ui._styles:
456 if not ui._styles:
457 return
457 return
458 width = max(len(s) for s in ui._styles)
458 width = max(len(s) for s in ui._styles)
459 for label, effects in sorted(ui._styles.items()):
459 for label, effects in sorted(ui._styles.items()):
460 ui.write('%s' % label, label=label)
460 ui.write('%s' % label, label=label)
461 if effects:
461 if effects:
462 # 50
462 # 50
463 ui.write(': ')
463 ui.write(': ')
464 ui.write(' ' * (max(0, width - len(label))))
464 ui.write(' ' * (max(0, width - len(label))))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
465 ui.write(', '.join(ui.label(e, e) for e in effects.split()))
466 ui.write('\n')
466 ui.write('\n')
467
467
468 @command('debugcreatestreamclonebundle', [], 'FILE')
468 @command('debugcreatestreamclonebundle', [], 'FILE')
469 def debugcreatestreamclonebundle(ui, repo, fname):
469 def debugcreatestreamclonebundle(ui, repo, fname):
470 """create a stream clone bundle file
470 """create a stream clone bundle file
471
471
472 Stream bundles are special bundles that are essentially archives of
472 Stream bundles are special bundles that are essentially archives of
473 revlog files. They are commonly used for cloning very quickly.
473 revlog files. They are commonly used for cloning very quickly.
474 """
474 """
475 # TODO we may want to turn this into an abort when this functionality
475 # TODO we may want to turn this into an abort when this functionality
476 # is moved into `hg bundle`.
476 # is moved into `hg bundle`.
477 if phases.hassecret(repo):
477 if phases.hassecret(repo):
478 ui.warn(_('(warning: stream clone bundle will contain secret '
478 ui.warn(_('(warning: stream clone bundle will contain secret '
479 'revisions)\n'))
479 'revisions)\n'))
480
480
481 requirements, gen = streamclone.generatebundlev1(repo)
481 requirements, gen = streamclone.generatebundlev1(repo)
482 changegroup.writechunks(ui, gen, fname)
482 changegroup.writechunks(ui, gen, fname)
483
483
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
484 ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
485
485
486 @command('debugdag',
486 @command('debugdag',
487 [('t', 'tags', None, _('use tags as labels')),
487 [('t', 'tags', None, _('use tags as labels')),
488 ('b', 'branches', None, _('annotate with branch names')),
488 ('b', 'branches', None, _('annotate with branch names')),
489 ('', 'dots', None, _('use dots for runs')),
489 ('', 'dots', None, _('use dots for runs')),
490 ('s', 'spaces', None, _('separate elements by spaces'))],
490 ('s', 'spaces', None, _('separate elements by spaces'))],
491 _('[OPTION]... [FILE [REV]...]'),
491 _('[OPTION]... [FILE [REV]...]'),
492 optionalrepo=True)
492 optionalrepo=True)
493 def debugdag(ui, repo, file_=None, *revs, **opts):
493 def debugdag(ui, repo, file_=None, *revs, **opts):
494 """format the changelog or an index DAG as a concise textual description
494 """format the changelog or an index DAG as a concise textual description
495
495
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
496 If you pass a revlog index, the revlog's DAG is emitted. If you list
497 revision numbers, they get labeled in the output as rN.
497 revision numbers, they get labeled in the output as rN.
498
498
499 Otherwise, the changelog DAG of the current repo is emitted.
499 Otherwise, the changelog DAG of the current repo is emitted.
500 """
500 """
501 spaces = opts.get(r'spaces')
501 spaces = opts.get(r'spaces')
502 dots = opts.get(r'dots')
502 dots = opts.get(r'dots')
503 if file_:
503 if file_:
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
504 rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
505 file_)
505 file_)
506 revs = set((int(r) for r in revs))
506 revs = set((int(r) for r in revs))
507 def events():
507 def events():
508 for r in rlog:
508 for r in rlog:
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
509 yield 'n', (r, list(p for p in rlog.parentrevs(r)
510 if p != -1))
510 if p != -1))
511 if r in revs:
511 if r in revs:
512 yield 'l', (r, "r%i" % r)
512 yield 'l', (r, "r%i" % r)
513 elif repo:
513 elif repo:
514 cl = repo.changelog
514 cl = repo.changelog
515 tags = opts.get(r'tags')
515 tags = opts.get(r'tags')
516 branches = opts.get(r'branches')
516 branches = opts.get(r'branches')
517 if tags:
517 if tags:
518 labels = {}
518 labels = {}
519 for l, n in repo.tags().items():
519 for l, n in repo.tags().items():
520 labels.setdefault(cl.rev(n), []).append(l)
520 labels.setdefault(cl.rev(n), []).append(l)
521 def events():
521 def events():
522 b = "default"
522 b = "default"
523 for r in cl:
523 for r in cl:
524 if branches:
524 if branches:
525 newb = cl.read(cl.node(r))[5]['branch']
525 newb = cl.read(cl.node(r))[5]['branch']
526 if newb != b:
526 if newb != b:
527 yield 'a', newb
527 yield 'a', newb
528 b = newb
528 b = newb
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
529 yield 'n', (r, list(p for p in cl.parentrevs(r)
530 if p != -1))
530 if p != -1))
531 if tags:
531 if tags:
532 ls = labels.get(r)
532 ls = labels.get(r)
533 if ls:
533 if ls:
534 for l in ls:
534 for l in ls:
535 yield 'l', (r, l)
535 yield 'l', (r, l)
536 else:
536 else:
537 raise error.Abort(_('need repo for changelog dag'))
537 raise error.Abort(_('need repo for changelog dag'))
538
538
539 for line in dagparser.dagtextlines(events(),
539 for line in dagparser.dagtextlines(events(),
540 addspaces=spaces,
540 addspaces=spaces,
541 wraplabels=True,
541 wraplabels=True,
542 wrapannotations=True,
542 wrapannotations=True,
543 wrapnonlinear=dots,
543 wrapnonlinear=dots,
544 usedots=dots,
544 usedots=dots,
545 maxlinewidth=70):
545 maxlinewidth=70):
546 ui.write(line)
546 ui.write(line)
547 ui.write("\n")
547 ui.write("\n")
548
548
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
549 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
550 def debugdata(ui, repo, file_, rev=None, **opts):
550 def debugdata(ui, repo, file_, rev=None, **opts):
551 """dump the contents of a data file revision"""
551 """dump the contents of a data file revision"""
552 opts = pycompat.byteskwargs(opts)
552 opts = pycompat.byteskwargs(opts)
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
553 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
554 if rev is not None:
554 if rev is not None:
555 raise error.CommandError('debugdata', _('invalid arguments'))
555 raise error.CommandError('debugdata', _('invalid arguments'))
556 file_, rev = None, file_
556 file_, rev = None, file_
557 elif rev is None:
557 elif rev is None:
558 raise error.CommandError('debugdata', _('invalid arguments'))
558 raise error.CommandError('debugdata', _('invalid arguments'))
559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
559 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
560 try:
560 try:
561 ui.write(r.revision(r.lookup(rev), raw=True))
561 ui.write(r.revision(r.lookup(rev), raw=True))
562 except KeyError:
562 except KeyError:
563 raise error.Abort(_('invalid revision identifier %s') % rev)
563 raise error.Abort(_('invalid revision identifier %s') % rev)
564
564
565 @command('debugdate',
565 @command('debugdate',
566 [('e', 'extended', None, _('try extended date formats'))],
566 [('e', 'extended', None, _('try extended date formats'))],
567 _('[-e] DATE [RANGE]'),
567 _('[-e] DATE [RANGE]'),
568 norepo=True, optionalrepo=True)
568 norepo=True, optionalrepo=True)
569 def debugdate(ui, date, range=None, **opts):
569 def debugdate(ui, date, range=None, **opts):
570 """parse and display a date"""
570 """parse and display a date"""
571 if opts[r"extended"]:
571 if opts[r"extended"]:
572 d = dateutil.parsedate(date, util.extendeddateformats)
572 d = dateutil.parsedate(date, util.extendeddateformats)
573 else:
573 else:
574 d = dateutil.parsedate(date)
574 d = dateutil.parsedate(date)
575 ui.write(("internal: %d %d\n") % d)
575 ui.write(("internal: %d %d\n") % d)
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
576 ui.write(("standard: %s\n") % dateutil.datestr(d))
577 if range:
577 if range:
578 m = dateutil.matchdate(range)
578 m = dateutil.matchdate(range)
579 ui.write(("match: %s\n") % m(d[0]))
579 ui.write(("match: %s\n") % m(d[0]))
580
580
581 @command('debugdeltachain',
581 @command('debugdeltachain',
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
582 cmdutil.debugrevlogopts + cmdutil.formatteropts,
583 _('-c|-m|FILE'),
583 _('-c|-m|FILE'),
584 optionalrepo=True)
584 optionalrepo=True)
585 def debugdeltachain(ui, repo, file_=None, **opts):
585 def debugdeltachain(ui, repo, file_=None, **opts):
586 """dump information about delta chains in a revlog
586 """dump information about delta chains in a revlog
587
587
588 Output can be templatized. Available template keywords are:
588 Output can be templatized. Available template keywords are:
589
589
590 :``rev``: revision number
590 :``rev``: revision number
591 :``chainid``: delta chain identifier (numbered by unique base)
591 :``chainid``: delta chain identifier (numbered by unique base)
592 :``chainlen``: delta chain length to this revision
592 :``chainlen``: delta chain length to this revision
593 :``prevrev``: previous revision in delta chain
593 :``prevrev``: previous revision in delta chain
594 :``deltatype``: role of delta / how it was computed
594 :``deltatype``: role of delta / how it was computed
595 :``compsize``: compressed size of revision
595 :``compsize``: compressed size of revision
596 :``uncompsize``: uncompressed size of revision
596 :``uncompsize``: uncompressed size of revision
597 :``chainsize``: total size of compressed revisions in chain
597 :``chainsize``: total size of compressed revisions in chain
598 :``chainratio``: total chain size divided by uncompressed revision size
598 :``chainratio``: total chain size divided by uncompressed revision size
599 (new delta chains typically start at ratio 2.00)
599 (new delta chains typically start at ratio 2.00)
600 :``lindist``: linear distance from base revision in delta chain to end
600 :``lindist``: linear distance from base revision in delta chain to end
601 of this revision
601 of this revision
602 :``extradist``: total size of revisions not part of this delta chain from
602 :``extradist``: total size of revisions not part of this delta chain from
603 base of delta chain to end of this revision; a measurement
603 base of delta chain to end of this revision; a measurement
604 of how much extra data we need to read/seek across to read
604 of how much extra data we need to read/seek across to read
605 the delta chain for this revision
605 the delta chain for this revision
606 :``extraratio``: extradist divided by chainsize; another representation of
606 :``extraratio``: extradist divided by chainsize; another representation of
607 how much unrelated data is needed to load this delta chain
607 how much unrelated data is needed to load this delta chain
608
608
609 If the repository is configured to use the sparse read, additional keywords
609 If the repository is configured to use the sparse read, additional keywords
610 are available:
610 are available:
611
611
612 :``readsize``: total size of data read from the disk for a revision
612 :``readsize``: total size of data read from the disk for a revision
613 (sum of the sizes of all the blocks)
613 (sum of the sizes of all the blocks)
614 :``largestblock``: size of the largest block of data read from the disk
614 :``largestblock``: size of the largest block of data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
615 :``readdensity``: density of useful bytes in the data read from the disk
616 :``srchunks``: in how many data hunks the whole revision would be read
616 :``srchunks``: in how many data hunks the whole revision would be read
617
617
618 The sparse read can be enabled with experimental.sparse-read = True
618 The sparse read can be enabled with experimental.sparse-read = True
619 """
619 """
620 opts = pycompat.byteskwargs(opts)
620 opts = pycompat.byteskwargs(opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
621 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
622 index = r.index
622 index = r.index
623 start = r.start
623 start = r.start
624 length = r.length
624 length = r.length
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
625 generaldelta = r.version & revlog.FLAG_GENERALDELTA
626 withsparseread = getattr(r, '_withsparseread', False)
626 withsparseread = getattr(r, '_withsparseread', False)
627
627
628 def revinfo(rev):
628 def revinfo(rev):
629 e = index[rev]
629 e = index[rev]
630 compsize = e[1]
630 compsize = e[1]
631 uncompsize = e[2]
631 uncompsize = e[2]
632 chainsize = 0
632 chainsize = 0
633
633
634 if generaldelta:
634 if generaldelta:
635 if e[3] == e[5]:
635 if e[3] == e[5]:
636 deltatype = 'p1'
636 deltatype = 'p1'
637 elif e[3] == e[6]:
637 elif e[3] == e[6]:
638 deltatype = 'p2'
638 deltatype = 'p2'
639 elif e[3] == rev - 1:
639 elif e[3] == rev - 1:
640 deltatype = 'prev'
640 deltatype = 'prev'
641 elif e[3] == rev:
641 elif e[3] == rev:
642 deltatype = 'base'
642 deltatype = 'base'
643 else:
643 else:
644 deltatype = 'other'
644 deltatype = 'other'
645 else:
645 else:
646 if e[3] == rev:
646 if e[3] == rev:
647 deltatype = 'base'
647 deltatype = 'base'
648 else:
648 else:
649 deltatype = 'prev'
649 deltatype = 'prev'
650
650
651 chain = r._deltachain(rev)[0]
651 chain = r._deltachain(rev)[0]
652 for iterrev in chain:
652 for iterrev in chain:
653 e = index[iterrev]
653 e = index[iterrev]
654 chainsize += e[1]
654 chainsize += e[1]
655
655
656 return compsize, uncompsize, deltatype, chain, chainsize
656 return compsize, uncompsize, deltatype, chain, chainsize
657
657
658 fm = ui.formatter('debugdeltachain', opts)
658 fm = ui.formatter('debugdeltachain', opts)
659
659
660 fm.plain(' rev chain# chainlen prev delta '
660 fm.plain(' rev chain# chainlen prev delta '
661 'size rawsize chainsize ratio lindist extradist '
661 'size rawsize chainsize ratio lindist extradist '
662 'extraratio')
662 'extraratio')
663 if withsparseread:
663 if withsparseread:
664 fm.plain(' readsize largestblk rddensity srchunks')
664 fm.plain(' readsize largestblk rddensity srchunks')
665 fm.plain('\n')
665 fm.plain('\n')
666
666
667 chainbases = {}
667 chainbases = {}
668 for rev in r:
668 for rev in r:
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
669 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
670 chainbase = chain[0]
670 chainbase = chain[0]
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
671 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
672 basestart = start(chainbase)
672 basestart = start(chainbase)
673 revstart = start(rev)
673 revstart = start(rev)
674 lineardist = revstart + comp - basestart
674 lineardist = revstart + comp - basestart
675 extradist = lineardist - chainsize
675 extradist = lineardist - chainsize
676 try:
676 try:
677 prevrev = chain[-2]
677 prevrev = chain[-2]
678 except IndexError:
678 except IndexError:
679 prevrev = -1
679 prevrev = -1
680
680
681 chainratio = float(chainsize) / float(uncomp)
681 chainratio = float(chainsize) / float(uncomp)
682 extraratio = float(extradist) / float(chainsize)
682 extraratio = float(extradist) / float(chainsize)
683
683
684 fm.startitem()
684 fm.startitem()
685 fm.write('rev chainid chainlen prevrev deltatype compsize '
685 fm.write('rev chainid chainlen prevrev deltatype compsize '
686 'uncompsize chainsize chainratio lindist extradist '
686 'uncompsize chainsize chainratio lindist extradist '
687 'extraratio',
687 'extraratio',
688 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
688 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
689 rev, chainid, len(chain), prevrev, deltatype, comp,
689 rev, chainid, len(chain), prevrev, deltatype, comp,
690 uncomp, chainsize, chainratio, lineardist, extradist,
690 uncomp, chainsize, chainratio, lineardist, extradist,
691 extraratio,
691 extraratio,
692 rev=rev, chainid=chainid, chainlen=len(chain),
692 rev=rev, chainid=chainid, chainlen=len(chain),
693 prevrev=prevrev, deltatype=deltatype, compsize=comp,
693 prevrev=prevrev, deltatype=deltatype, compsize=comp,
694 uncompsize=uncomp, chainsize=chainsize,
694 uncompsize=uncomp, chainsize=chainsize,
695 chainratio=chainratio, lindist=lineardist,
695 chainratio=chainratio, lindist=lineardist,
696 extradist=extradist, extraratio=extraratio)
696 extradist=extradist, extraratio=extraratio)
697 if withsparseread:
697 if withsparseread:
698 readsize = 0
698 readsize = 0
699 largestblock = 0
699 largestblock = 0
700 srchunks = 0
700 srchunks = 0
701
701
702 for revschunk in revlog._slicechunk(r, chain):
702 for revschunk in revlog._slicechunk(r, chain):
703 srchunks += 1
703 srchunks += 1
704 blkend = start(revschunk[-1]) + length(revschunk[-1])
704 blkend = start(revschunk[-1]) + length(revschunk[-1])
705 blksize = blkend - start(revschunk[0])
705 blksize = blkend - start(revschunk[0])
706
706
707 readsize += blksize
707 readsize += blksize
708 if largestblock < blksize:
708 if largestblock < blksize:
709 largestblock = blksize
709 largestblock = blksize
710
710
711 readdensity = float(chainsize) / float(readsize)
711 readdensity = float(chainsize) / float(readsize)
712
712
713 fm.write('readsize largestblock readdensity srchunks',
713 fm.write('readsize largestblock readdensity srchunks',
714 ' %10d %10d %9.5f %8d',
714 ' %10d %10d %9.5f %8d',
715 readsize, largestblock, readdensity, srchunks,
715 readsize, largestblock, readdensity, srchunks,
716 readsize=readsize, largestblock=largestblock,
716 readsize=readsize, largestblock=largestblock,
717 readdensity=readdensity, srchunks=srchunks)
717 readdensity=readdensity, srchunks=srchunks)
718
718
719 fm.plain('\n')
719 fm.plain('\n')
720
720
721 fm.end()
721 fm.end()
722
722
723 @command('debugdirstate|debugstate',
723 @command('debugdirstate|debugstate',
724 [('', 'nodates', None, _('do not display the saved mtime')),
724 [('', 'nodates', None, _('do not display the saved mtime')),
725 ('', 'datesort', None, _('sort by saved mtime'))],
725 ('', 'datesort', None, _('sort by saved mtime'))],
726 _('[OPTION]...'))
726 _('[OPTION]...'))
727 def debugstate(ui, repo, **opts):
727 def debugstate(ui, repo, **opts):
728 """show the contents of the current dirstate"""
728 """show the contents of the current dirstate"""
729
729
730 nodates = opts.get(r'nodates')
730 nodates = opts.get(r'nodates')
731 datesort = opts.get(r'datesort')
731 datesort = opts.get(r'datesort')
732
732
733 timestr = ""
733 timestr = ""
734 if datesort:
734 if datesort:
735 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
735 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
736 else:
736 else:
737 keyfunc = None # sort by filename
737 keyfunc = None # sort by filename
738 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
738 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
739 if ent[3] == -1:
739 if ent[3] == -1:
740 timestr = 'unset '
740 timestr = 'unset '
741 elif nodates:
741 elif nodates:
742 timestr = 'set '
742 timestr = 'set '
743 else:
743 else:
744 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
744 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
745 time.localtime(ent[3]))
745 time.localtime(ent[3]))
746 timestr = encoding.strtolocal(timestr)
746 timestr = encoding.strtolocal(timestr)
747 if ent[1] & 0o20000:
747 if ent[1] & 0o20000:
748 mode = 'lnk'
748 mode = 'lnk'
749 else:
749 else:
750 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
750 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
751 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
751 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
752 for f in repo.dirstate.copies():
752 for f in repo.dirstate.copies():
753 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
753 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
754
754
755 @command('debugdiscovery',
755 @command('debugdiscovery',
756 [('', 'old', None, _('use old-style discovery')),
756 [('', 'old', None, _('use old-style discovery')),
757 ('', 'nonheads', None,
757 ('', 'nonheads', None,
758 _('use old-style discovery with non-heads included')),
758 _('use old-style discovery with non-heads included')),
759 ('', 'rev', [], 'restrict discovery to this set of revs'),
759 ('', 'rev', [], 'restrict discovery to this set of revs'),
760 ] + cmdutil.remoteopts,
760 ] + cmdutil.remoteopts,
761 _('[--rev REV] [OTHER]'))
761 _('[--rev REV] [OTHER]'))
762 def debugdiscovery(ui, repo, remoteurl="default", **opts):
762 def debugdiscovery(ui, repo, remoteurl="default", **opts):
763 """runs the changeset discovery protocol in isolation"""
763 """runs the changeset discovery protocol in isolation"""
764 opts = pycompat.byteskwargs(opts)
764 opts = pycompat.byteskwargs(opts)
765 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
765 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
766 remote = hg.peer(repo, opts, remoteurl)
766 remote = hg.peer(repo, opts, remoteurl)
767 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
767 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
768
768
769 # make sure tests are repeatable
769 # make sure tests are repeatable
770 random.seed(12323)
770 random.seed(12323)
771
771
772 def doit(pushedrevs, remoteheads, remote=remote):
772 def doit(pushedrevs, remoteheads, remote=remote):
773 if opts.get('old'):
773 if opts.get('old'):
774 if not util.safehasattr(remote, 'branches'):
774 if not util.safehasattr(remote, 'branches'):
775 # enable in-client legacy support
775 # enable in-client legacy support
776 remote = localrepo.locallegacypeer(remote.local())
776 remote = localrepo.locallegacypeer(remote.local())
777 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
777 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
778 force=True)
778 force=True)
779 common = set(common)
779 common = set(common)
780 if not opts.get('nonheads'):
780 if not opts.get('nonheads'):
781 ui.write(("unpruned common: %s\n") %
781 ui.write(("unpruned common: %s\n") %
782 " ".join(sorted(short(n) for n in common)))
782 " ".join(sorted(short(n) for n in common)))
783 dag = dagutil.revlogdag(repo.changelog)
783 dag = dagutil.revlogdag(repo.changelog)
784 all = dag.ancestorset(dag.internalizeall(common))
784 all = dag.ancestorset(dag.internalizeall(common))
785 common = dag.externalizeall(dag.headsetofconnecteds(all))
785 common = dag.externalizeall(dag.headsetofconnecteds(all))
786 else:
786 else:
787 nodes = None
787 nodes = None
788 if pushedrevs:
788 if pushedrevs:
789 revs = scmutil.revrange(repo, pushedrevs)
789 revs = scmutil.revrange(repo, pushedrevs)
790 nodes = [repo[r].node() for r in revs]
790 nodes = [repo[r].node() for r in revs]
791 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
791 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
792 ancestorsof=nodes)
792 ancestorsof=nodes)
793 common = set(common)
793 common = set(common)
794 rheads = set(hds)
794 rheads = set(hds)
795 lheads = set(repo.heads())
795 lheads = set(repo.heads())
796 ui.write(("common heads: %s\n") %
796 ui.write(("common heads: %s\n") %
797 " ".join(sorted(short(n) for n in common)))
797 " ".join(sorted(short(n) for n in common)))
798 if lheads <= common:
798 if lheads <= common:
799 ui.write(("local is subset\n"))
799 ui.write(("local is subset\n"))
800 elif rheads <= common:
800 elif rheads <= common:
801 ui.write(("remote is subset\n"))
801 ui.write(("remote is subset\n"))
802
802
803 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
803 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
804 localrevs = opts['rev']
804 localrevs = opts['rev']
805 doit(localrevs, remoterevs)
805 doit(localrevs, remoterevs)
806
806
807 _chunksize = 4 << 10
807 _chunksize = 4 << 10
808
808
809 @command('debugdownload',
809 @command('debugdownload',
810 [
810 [
811 ('o', 'output', '', _('path')),
811 ('o', 'output', '', _('path')),
812 ],
812 ],
813 optionalrepo=True)
813 optionalrepo=True)
814 def debugdownload(ui, repo, url, output=None, **opts):
814 def debugdownload(ui, repo, url, output=None, **opts):
815 """download a resource using Mercurial logic and config
815 """download a resource using Mercurial logic and config
816 """
816 """
817 fh = urlmod.open(ui, url, output)
817 fh = urlmod.open(ui, url, output)
818
818
819 dest = ui
819 dest = ui
820 if output:
820 if output:
821 dest = open(output, "wb", _chunksize)
821 dest = open(output, "wb", _chunksize)
822 try:
822 try:
823 data = fh.read(_chunksize)
823 data = fh.read(_chunksize)
824 while data:
824 while data:
825 dest.write(data)
825 dest.write(data)
826 data = fh.read(_chunksize)
826 data = fh.read(_chunksize)
827 finally:
827 finally:
828 if output:
828 if output:
829 dest.close()
829 dest.close()
830
830
831 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
831 @command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
832 def debugextensions(ui, repo, **opts):
832 def debugextensions(ui, repo, **opts):
833 '''show information about active extensions'''
833 '''show information about active extensions'''
834 opts = pycompat.byteskwargs(opts)
834 opts = pycompat.byteskwargs(opts)
835 exts = extensions.extensions(ui)
835 exts = extensions.extensions(ui)
836 hgver = util.version()
836 hgver = util.version()
837 fm = ui.formatter('debugextensions', opts)
837 fm = ui.formatter('debugextensions', opts)
838 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
838 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
839 isinternal = extensions.ismoduleinternal(extmod)
839 isinternal = extensions.ismoduleinternal(extmod)
840 extsource = pycompat.fsencode(extmod.__file__)
840 extsource = pycompat.fsencode(extmod.__file__)
841 if isinternal:
841 if isinternal:
842 exttestedwith = [] # never expose magic string to users
842 exttestedwith = [] # never expose magic string to users
843 else:
843 else:
844 exttestedwith = getattr(extmod, 'testedwith', '').split()
844 exttestedwith = getattr(extmod, 'testedwith', '').split()
845 extbuglink = getattr(extmod, 'buglink', None)
845 extbuglink = getattr(extmod, 'buglink', None)
846
846
847 fm.startitem()
847 fm.startitem()
848
848
849 if ui.quiet or ui.verbose:
849 if ui.quiet or ui.verbose:
850 fm.write('name', '%s\n', extname)
850 fm.write('name', '%s\n', extname)
851 else:
851 else:
852 fm.write('name', '%s', extname)
852 fm.write('name', '%s', extname)
853 if isinternal or hgver in exttestedwith:
853 if isinternal or hgver in exttestedwith:
854 fm.plain('\n')
854 fm.plain('\n')
855 elif not exttestedwith:
855 elif not exttestedwith:
856 fm.plain(_(' (untested!)\n'))
856 fm.plain(_(' (untested!)\n'))
857 else:
857 else:
858 lasttestedversion = exttestedwith[-1]
858 lasttestedversion = exttestedwith[-1]
859 fm.plain(' (%s!)\n' % lasttestedversion)
859 fm.plain(' (%s!)\n' % lasttestedversion)
860
860
861 fm.condwrite(ui.verbose and extsource, 'source',
861 fm.condwrite(ui.verbose and extsource, 'source',
862 _(' location: %s\n'), extsource or "")
862 _(' location: %s\n'), extsource or "")
863
863
864 if ui.verbose:
864 if ui.verbose:
865 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
865 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
866 fm.data(bundled=isinternal)
866 fm.data(bundled=isinternal)
867
867
868 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
868 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
869 _(' tested with: %s\n'),
869 _(' tested with: %s\n'),
870 fm.formatlist(exttestedwith, name='ver'))
870 fm.formatlist(exttestedwith, name='ver'))
871
871
872 fm.condwrite(ui.verbose and extbuglink, 'buglink',
872 fm.condwrite(ui.verbose and extbuglink, 'buglink',
873 _(' bug reporting: %s\n'), extbuglink or "")
873 _(' bug reporting: %s\n'), extbuglink or "")
874
874
875 fm.end()
875 fm.end()
876
876
877 @command('debugfileset',
877 @command('debugfileset',
878 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
878 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
879 _('[-r REV] FILESPEC'))
879 _('[-r REV] FILESPEC'))
880 def debugfileset(ui, repo, expr, **opts):
880 def debugfileset(ui, repo, expr, **opts):
881 '''parse and apply a fileset specification'''
881 '''parse and apply a fileset specification'''
882 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
882 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
883 if ui.verbose:
883 if ui.verbose:
884 tree = fileset.parse(expr)
884 tree = fileset.parse(expr)
885 ui.note(fileset.prettyformat(tree), "\n")
885 ui.note(fileset.prettyformat(tree), "\n")
886
886
- 887 for f in ctx.getfileset(expr):
+ 887 for f in sorted(ctx.getfileset(expr)):
888 ui.write("%s\n" % f)
888 ui.write("%s\n" % f)
889
889
890 @command('debugformat',
890 @command('debugformat',
891 [] + cmdutil.formatteropts,
891 [] + cmdutil.formatteropts,
892 _(''))
892 _(''))
893 def debugformat(ui, repo, **opts):
893 def debugformat(ui, repo, **opts):
894 """display format information about the current repository
894 """display format information about the current repository
895
895
896 Use --verbose to get extra information about the current config value and
896 Use --verbose to get extra information about the current config value and
897 the Mercurial default."""
897 the Mercurial default."""
898 opts = pycompat.byteskwargs(opts)
898 opts = pycompat.byteskwargs(opts)
899 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
899 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
900 maxvariantlength = max(len('format-variant'), maxvariantlength)
900 maxvariantlength = max(len('format-variant'), maxvariantlength)
901
901
902 def makeformatname(name):
902 def makeformatname(name):
903 return '%s:' + (' ' * (maxvariantlength - len(name)))
903 return '%s:' + (' ' * (maxvariantlength - len(name)))
904
904
905 fm = ui.formatter('debugformat', opts)
905 fm = ui.formatter('debugformat', opts)
906 if fm.isplain():
906 if fm.isplain():
907 def formatvalue(value):
907 def formatvalue(value):
908 if util.safehasattr(value, 'startswith'):
908 if util.safehasattr(value, 'startswith'):
909 return value
909 return value
910 if value:
910 if value:
911 return 'yes'
911 return 'yes'
912 else:
912 else:
913 return 'no'
913 return 'no'
914 else:
914 else:
915 formatvalue = pycompat.identity
915 formatvalue = pycompat.identity
916
916
917 fm.plain('format-variant')
917 fm.plain('format-variant')
918 fm.plain(' ' * (maxvariantlength - len('format-variant')))
918 fm.plain(' ' * (maxvariantlength - len('format-variant')))
919 fm.plain(' repo')
919 fm.plain(' repo')
920 if ui.verbose:
920 if ui.verbose:
921 fm.plain(' config default')
921 fm.plain(' config default')
922 fm.plain('\n')
922 fm.plain('\n')
923 for fv in upgrade.allformatvariant:
923 for fv in upgrade.allformatvariant:
924 fm.startitem()
924 fm.startitem()
925 repovalue = fv.fromrepo(repo)
925 repovalue = fv.fromrepo(repo)
926 configvalue = fv.fromconfig(repo)
926 configvalue = fv.fromconfig(repo)
927
927
928 if repovalue != configvalue:
928 if repovalue != configvalue:
929 namelabel = 'formatvariant.name.mismatchconfig'
929 namelabel = 'formatvariant.name.mismatchconfig'
930 repolabel = 'formatvariant.repo.mismatchconfig'
930 repolabel = 'formatvariant.repo.mismatchconfig'
931 elif repovalue != fv.default:
931 elif repovalue != fv.default:
932 namelabel = 'formatvariant.name.mismatchdefault'
932 namelabel = 'formatvariant.name.mismatchdefault'
933 repolabel = 'formatvariant.repo.mismatchdefault'
933 repolabel = 'formatvariant.repo.mismatchdefault'
934 else:
934 else:
935 namelabel = 'formatvariant.name.uptodate'
935 namelabel = 'formatvariant.name.uptodate'
936 repolabel = 'formatvariant.repo.uptodate'
936 repolabel = 'formatvariant.repo.uptodate'
937
937
938 fm.write('name', makeformatname(fv.name), fv.name,
938 fm.write('name', makeformatname(fv.name), fv.name,
939 label=namelabel)
939 label=namelabel)
940 fm.write('repo', ' %3s', formatvalue(repovalue),
940 fm.write('repo', ' %3s', formatvalue(repovalue),
941 label=repolabel)
941 label=repolabel)
942 if fv.default != configvalue:
942 if fv.default != configvalue:
943 configlabel = 'formatvariant.config.special'
943 configlabel = 'formatvariant.config.special'
944 else:
944 else:
945 configlabel = 'formatvariant.config.default'
945 configlabel = 'formatvariant.config.default'
946 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
946 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
947 label=configlabel)
947 label=configlabel)
948 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
948 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
949 label='formatvariant.default')
949 label='formatvariant.default')
950 fm.plain('\n')
950 fm.plain('\n')
951 fm.end()
951 fm.end()
952
952
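A side note on ``makeformatname()`` above: it pads each variant name to a common width so the plain-text columns line up. A small sketch with hypothetical variant names (the real ones come from ``upgrade.allformatvariant``)::

    names = ['dotencode', 'generaldelta', 'plain-cl-delta']
    width = max(len('format-variant'), max(len(n) for n in names))
    for n in names:
        fmt = '%s:' + ' ' * (width - len(n))   # mirrors makeformatname()
        print(fmt % n + ' yes')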
953 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
953 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
954 def debugfsinfo(ui, path="."):
954 def debugfsinfo(ui, path="."):
955 """show information detected about current filesystem"""
955 """show information detected about current filesystem"""
956 ui.write(('path: %s\n') % path)
956 ui.write(('path: %s\n') % path)
957 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
957 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
958 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
958 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
959 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
959 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
960 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
960 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
961 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
961 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
962 casesensitive = '(unknown)'
962 casesensitive = '(unknown)'
963 try:
963 try:
964 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
964 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
965 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
965 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
966 except OSError:
966 except OSError:
967 pass
967 pass
968 ui.write(('case-sensitive: %s\n') % casesensitive)
968 ui.write(('case-sensitive: %s\n') % casesensitive)
969
969
970 @command('debuggetbundle',
970 @command('debuggetbundle',
971 [('H', 'head', [], _('id of head node'), _('ID')),
971 [('H', 'head', [], _('id of head node'), _('ID')),
972 ('C', 'common', [], _('id of common node'), _('ID')),
972 ('C', 'common', [], _('id of common node'), _('ID')),
973 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
973 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
974 _('REPO FILE [-H|-C ID]...'),
974 _('REPO FILE [-H|-C ID]...'),
975 norepo=True)
975 norepo=True)
976 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
976 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
977 """retrieves a bundle from a repo
977 """retrieves a bundle from a repo
978
978
979 Every ID must be a full-length hex node id string. Saves the bundle to the
979 Every ID must be a full-length hex node id string. Saves the bundle to the
980 given file.
980 given file.
981 """
981 """
982 opts = pycompat.byteskwargs(opts)
982 opts = pycompat.byteskwargs(opts)
983 repo = hg.peer(ui, opts, repopath)
983 repo = hg.peer(ui, opts, repopath)
984 if not repo.capable('getbundle'):
984 if not repo.capable('getbundle'):
985 raise error.Abort("getbundle() not supported by target repository")
985 raise error.Abort("getbundle() not supported by target repository")
986 args = {}
986 args = {}
987 if common:
987 if common:
988 args[r'common'] = [bin(s) for s in common]
988 args[r'common'] = [bin(s) for s in common]
989 if head:
989 if head:
990 args[r'heads'] = [bin(s) for s in head]
990 args[r'heads'] = [bin(s) for s in head]
991 # TODO: get desired bundlecaps from command line.
991 # TODO: get desired bundlecaps from command line.
992 args[r'bundlecaps'] = None
992 args[r'bundlecaps'] = None
993 bundle = repo.getbundle('debug', **args)
993 bundle = repo.getbundle('debug', **args)
994
994
995 bundletype = opts.get('type', 'bzip2').lower()
995 bundletype = opts.get('type', 'bzip2').lower()
996 btypes = {'none': 'HG10UN',
996 btypes = {'none': 'HG10UN',
997 'bzip2': 'HG10BZ',
997 'bzip2': 'HG10BZ',
998 'gzip': 'HG10GZ',
998 'gzip': 'HG10GZ',
999 'bundle2': 'HG20'}
999 'bundle2': 'HG20'}
1000 bundletype = btypes.get(bundletype)
1000 bundletype = btypes.get(bundletype)
1001 if bundletype not in bundle2.bundletypes:
1001 if bundletype not in bundle2.bundletypes:
1002 raise error.Abort(_('unknown bundle type specified with --type'))
1002 raise error.Abort(_('unknown bundle type specified with --type'))
1003 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1003 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1004
1004
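A sketch of the ``--type`` handling above, using the same table: the user-facing name is lower-cased, mapped to an internal bundle identifier, and anything unknown aborts. Here a plain ``ValueError`` stands in for ``error.Abort``, and the real code additionally checks membership in ``bundle2.bundletypes``::

    btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ', 'bundle2': 'HG20'}

    def resolvebundletype(name):
        bundletype = btypes.get(name.lower())
        if bundletype is None:
            raise ValueError('unknown bundle type specified with --type')
        return bundletype

    print(resolvebundletype('GZIP'))   # -> HG10GZ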
1005 @command('debugignore', [], '[FILE]')
1005 @command('debugignore', [], '[FILE]')
1006 def debugignore(ui, repo, *files, **opts):
1006 def debugignore(ui, repo, *files, **opts):
1007 """display the combined ignore pattern and information about ignored files
1007 """display the combined ignore pattern and information about ignored files
1008
1008
1009 With no argument display the combined ignore pattern.
1009 With no argument display the combined ignore pattern.
1010
1010
1011 Given space-separated file names, shows if the given file is ignored and
1011 Given space-separated file names, shows if the given file is ignored and
1012 if so, shows the ignore rule (file and line number) that matched it.
1012 if so, shows the ignore rule (file and line number) that matched it.
1013 """
1013 """
1014 ignore = repo.dirstate._ignore
1014 ignore = repo.dirstate._ignore
1015 if not files:
1015 if not files:
1016 # Show all the patterns
1016 # Show all the patterns
1017 ui.write("%s\n" % pycompat.byterepr(ignore))
1017 ui.write("%s\n" % pycompat.byterepr(ignore))
1018 else:
1018 else:
1019 m = scmutil.match(repo[None], pats=files)
1019 m = scmutil.match(repo[None], pats=files)
1020 for f in m.files():
1020 for f in m.files():
1021 nf = util.normpath(f)
1021 nf = util.normpath(f)
1022 ignored = None
1022 ignored = None
1023 ignoredata = None
1023 ignoredata = None
1024 if nf != '.':
1024 if nf != '.':
1025 if ignore(nf):
1025 if ignore(nf):
1026 ignored = nf
1026 ignored = nf
1027 ignoredata = repo.dirstate._ignorefileandline(nf)
1027 ignoredata = repo.dirstate._ignorefileandline(nf)
1028 else:
1028 else:
1029 for p in util.finddirs(nf):
1029 for p in util.finddirs(nf):
1030 if ignore(p):
1030 if ignore(p):
1031 ignored = p
1031 ignored = p
1032 ignoredata = repo.dirstate._ignorefileandline(p)
1032 ignoredata = repo.dirstate._ignorefileandline(p)
1033 break
1033 break
1034 if ignored:
1034 if ignored:
1035 if ignored == nf:
1035 if ignored == nf:
1036 ui.write(_("%s is ignored\n") % m.uipath(f))
1036 ui.write(_("%s is ignored\n") % m.uipath(f))
1037 else:
1037 else:
1038 ui.write(_("%s is ignored because of "
1038 ui.write(_("%s is ignored because of "
1039 "containing folder %s\n")
1039 "containing folder %s\n")
1040 % (m.uipath(f), ignored))
1040 % (m.uipath(f), ignored))
1041 ignorefile, lineno, line = ignoredata
1041 ignorefile, lineno, line = ignoredata
1042 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1042 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1043 % (ignorefile, lineno, line))
1043 % (ignorefile, lineno, line))
1044 else:
1044 else:
1045 ui.write(_("%s is not ignored\n") % m.uipath(f))
1045 ui.write(_("%s is not ignored\n") % m.uipath(f))
1046
1046
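The containing-folder branch above walks the parents of the file (via ``util.finddirs``) until one of them matches the ignore predicate. A pure-Python sketch of that idea, with a hypothetical ignore set standing in for the real matcher::

    def parentdirs(path):
        while '/' in path:
            path = path.rsplit('/', 1)[0]
            yield path

    ignored = {'build'}                # hypothetical ignored directory
    nf = 'build/out/obj.o'
    hit = next((p for p in parentdirs(nf) if p in ignored), None)
    print(hit)                         # -> build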
1047 @command('debugindex', cmdutil.debugrevlogopts +
1047 @command('debugindex', cmdutil.debugrevlogopts +
1048 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1048 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
1049 _('[-f FORMAT] -c|-m|FILE'),
1049 _('[-f FORMAT] -c|-m|FILE'),
1050 optionalrepo=True)
1050 optionalrepo=True)
1051 def debugindex(ui, repo, file_=None, **opts):
1051 def debugindex(ui, repo, file_=None, **opts):
1052 """dump the contents of an index file"""
1052 """dump the contents of an index file"""
1053 opts = pycompat.byteskwargs(opts)
1053 opts = pycompat.byteskwargs(opts)
1054 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1054 r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
1055 format = opts.get('format', 0)
1055 format = opts.get('format', 0)
1056 if format not in (0, 1):
1056 if format not in (0, 1):
1057 raise error.Abort(_("unknown format %d") % format)
1057 raise error.Abort(_("unknown format %d") % format)
1058
1058
1059 if ui.debugflag:
1059 if ui.debugflag:
1060 shortfn = hex
1060 shortfn = hex
1061 else:
1061 else:
1062 shortfn = short
1062 shortfn = short
1063
1063
1064 # There might not be anything in r, so have a sane default
1064 # There might not be anything in r, so have a sane default
1065 idlen = 12
1065 idlen = 12
1066 for i in r:
1066 for i in r:
1067 idlen = len(shortfn(r.node(i)))
1067 idlen = len(shortfn(r.node(i)))
1068 break
1068 break
1069
1069
1070 if format == 0:
1070 if format == 0:
1071 if ui.verbose:
1071 if ui.verbose:
1072 ui.write((" rev offset length linkrev"
1072 ui.write((" rev offset length linkrev"
1073 " %s %s p2\n") % ("nodeid".ljust(idlen),
1073 " %s %s p2\n") % ("nodeid".ljust(idlen),
1074 "p1".ljust(idlen)))
1074 "p1".ljust(idlen)))
1075 else:
1075 else:
1076 ui.write((" rev linkrev %s %s p2\n") % (
1076 ui.write((" rev linkrev %s %s p2\n") % (
1077 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1077 "nodeid".ljust(idlen), "p1".ljust(idlen)))
1078 elif format == 1:
1078 elif format == 1:
1079 if ui.verbose:
1079 if ui.verbose:
1080 ui.write((" rev flag offset length size link p1"
1080 ui.write((" rev flag offset length size link p1"
1081 " p2 %s\n") % "nodeid".rjust(idlen))
1081 " p2 %s\n") % "nodeid".rjust(idlen))
1082 else:
1082 else:
1083 ui.write((" rev flag size link p1 p2 %s\n") %
1083 ui.write((" rev flag size link p1 p2 %s\n") %
1084 "nodeid".rjust(idlen))
1084 "nodeid".rjust(idlen))
1085
1085
1086 for i in r:
1086 for i in r:
1087 node = r.node(i)
1087 node = r.node(i)
1088 if format == 0:
1088 if format == 0:
1089 try:
1089 try:
1090 pp = r.parents(node)
1090 pp = r.parents(node)
1091 except Exception:
1091 except Exception:
1092 pp = [nullid, nullid]
1092 pp = [nullid, nullid]
1093 if ui.verbose:
1093 if ui.verbose:
1094 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1094 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
1095 i, r.start(i), r.length(i), r.linkrev(i),
1095 i, r.start(i), r.length(i), r.linkrev(i),
1096 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1096 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
1097 else:
1097 else:
1098 ui.write("% 6d % 7d %s %s %s\n" % (
1098 ui.write("% 6d % 7d %s %s %s\n" % (
1099 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1099 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
1100 shortfn(pp[1])))
1100 shortfn(pp[1])))
1101 elif format == 1:
1101 elif format == 1:
1102 pr = r.parentrevs(i)
1102 pr = r.parentrevs(i)
1103 if ui.verbose:
1103 if ui.verbose:
1104 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1104 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
1105 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1105 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
1106 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1106 r.linkrev(i), pr[0], pr[1], shortfn(node)))
1107 else:
1107 else:
1108 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1108 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
1109 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1109 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
1110 shortfn(node)))
1110 shortfn(node)))
1111
1111
1112 @command('debugindexdot', cmdutil.debugrevlogopts,
1112 @command('debugindexdot', cmdutil.debugrevlogopts,
1113 _('-c|-m|FILE'), optionalrepo=True)
1113 _('-c|-m|FILE'), optionalrepo=True)
1114 def debugindexdot(ui, repo, file_=None, **opts):
1114 def debugindexdot(ui, repo, file_=None, **opts):
1115 """dump an index DAG as a graphviz dot file"""
1115 """dump an index DAG as a graphviz dot file"""
1116 opts = pycompat.byteskwargs(opts)
1116 opts = pycompat.byteskwargs(opts)
1117 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1117 r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
1118 ui.write(("digraph G {\n"))
1118 ui.write(("digraph G {\n"))
1119 for i in r:
1119 for i in r:
1120 node = r.node(i)
1120 node = r.node(i)
1121 pp = r.parents(node)
1121 pp = r.parents(node)
1122 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1122 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1123 if pp[1] != nullid:
1123 if pp[1] != nullid:
1124 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1124 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1125 ui.write("}\n")
1125 ui.write("}\n")
1126
1126
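For reference, a sketch of the dot output the command above emits: one ``parent -> child`` edge per revision, plus a second edge when p2 is not null. The parent data here is hypothetical, with ``-1`` playing the role of the null revision::

    parents = {0: (-1, -1), 1: (0, -1), 2: (0, 1)}   # rev -> (p1, p2)
    print('digraph G {')
    for rev in sorted(parents):
        p1, p2 = parents[rev]
        print('\t%d -> %d' % (p1, rev))
        if p2 != -1:
            print('\t%d -> %d' % (p2, rev))
    print('}')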
1127 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1127 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1128 def debuginstall(ui, **opts):
1128 def debuginstall(ui, **opts):
1129 '''test Mercurial installation
1129 '''test Mercurial installation
1130
1130
1131 Returns 0 on success.
1131 Returns 0 on success.
1132 '''
1132 '''
1133 opts = pycompat.byteskwargs(opts)
1133 opts = pycompat.byteskwargs(opts)
1134
1134
1135 def writetemp(contents):
1135 def writetemp(contents):
1136 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1136 (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
1137 f = os.fdopen(fd, r"wb")
1137 f = os.fdopen(fd, r"wb")
1138 f.write(contents)
1138 f.write(contents)
1139 f.close()
1139 f.close()
1140 return name
1140 return name
1141
1141
1142 problems = 0
1142 problems = 0
1143
1143
1144 fm = ui.formatter('debuginstall', opts)
1144 fm = ui.formatter('debuginstall', opts)
1145 fm.startitem()
1145 fm.startitem()
1146
1146
1147 # encoding
1147 # encoding
1148 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1148 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1149 err = None
1149 err = None
1150 try:
1150 try:
1151 codecs.lookup(pycompat.sysstr(encoding.encoding))
1151 codecs.lookup(pycompat.sysstr(encoding.encoding))
1152 except LookupError as inst:
1152 except LookupError as inst:
1153 err = stringutil.forcebytestr(inst)
1153 err = stringutil.forcebytestr(inst)
1154 problems += 1
1154 problems += 1
1155 fm.condwrite(err, 'encodingerror', _(" %s\n"
1155 fm.condwrite(err, 'encodingerror', _(" %s\n"
1156 " (check that your locale is properly set)\n"), err)
1156 " (check that your locale is properly set)\n"), err)
1157
1157
1158 # Python
1158 # Python
1159 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1159 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1160 pycompat.sysexecutable)
1160 pycompat.sysexecutable)
1161 fm.write('pythonver', _("checking Python version (%s)\n"),
1161 fm.write('pythonver', _("checking Python version (%s)\n"),
1162 ("%d.%d.%d" % sys.version_info[:3]))
1162 ("%d.%d.%d" % sys.version_info[:3]))
1163 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1163 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1164 os.path.dirname(pycompat.fsencode(os.__file__)))
1164 os.path.dirname(pycompat.fsencode(os.__file__)))
1165
1165
1166 security = set(sslutil.supportedprotocols)
1166 security = set(sslutil.supportedprotocols)
1167 if sslutil.hassni:
1167 if sslutil.hassni:
1168 security.add('sni')
1168 security.add('sni')
1169
1169
1170 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1170 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1171 fm.formatlist(sorted(security), name='protocol',
1171 fm.formatlist(sorted(security), name='protocol',
1172 fmt='%s', sep=','))
1172 fmt='%s', sep=','))
1173
1173
1174 # These are warnings, not errors. So don't increment problem count. This
1174 # These are warnings, not errors. So don't increment problem count. This
1175 # may change in the future.
1175 # may change in the future.
1176 if 'tls1.2' not in security:
1176 if 'tls1.2' not in security:
1177 fm.plain(_(' TLS 1.2 not supported by Python install; '
1177 fm.plain(_(' TLS 1.2 not supported by Python install; '
1178 'network connections lack modern security\n'))
1178 'network connections lack modern security\n'))
1179 if 'sni' not in security:
1179 if 'sni' not in security:
1180 fm.plain(_(' SNI not supported by Python install; may have '
1180 fm.plain(_(' SNI not supported by Python install; may have '
1181 'connectivity issues with some servers\n'))
1181 'connectivity issues with some servers\n'))
1182
1182
1183 # TODO print CA cert info
1183 # TODO print CA cert info
1184
1184
1185 # hg version
1185 # hg version
1186 hgver = util.version()
1186 hgver = util.version()
1187 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1187 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1188 hgver.split('+')[0])
1188 hgver.split('+')[0])
1189 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1189 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1190 '+'.join(hgver.split('+')[1:]))
1190 '+'.join(hgver.split('+')[1:]))
1191
1191
1192 # compiled modules
1192 # compiled modules
1193 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1193 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1194 policy.policy)
1194 policy.policy)
1195 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1195 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1196 os.path.dirname(pycompat.fsencode(__file__)))
1196 os.path.dirname(pycompat.fsencode(__file__)))
1197
1197
1198 if policy.policy in ('c', 'allow'):
1198 if policy.policy in ('c', 'allow'):
1199 err = None
1199 err = None
1200 try:
1200 try:
1201 from .cext import (
1201 from .cext import (
1202 base85,
1202 base85,
1203 bdiff,
1203 bdiff,
1204 mpatch,
1204 mpatch,
1205 osutil,
1205 osutil,
1206 )
1206 )
1207 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1207 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1208 except Exception as inst:
1208 except Exception as inst:
1209 err = stringutil.forcebytestr(inst)
1209 err = stringutil.forcebytestr(inst)
1210 problems += 1
1210 problems += 1
1211 fm.condwrite(err, 'extensionserror', " %s\n", err)
1211 fm.condwrite(err, 'extensionserror', " %s\n", err)
1212
1212
1213 compengines = util.compengines._engines.values()
1213 compengines = util.compengines._engines.values()
1214 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1214 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1215 fm.formatlist(sorted(e.name() for e in compengines),
1215 fm.formatlist(sorted(e.name() for e in compengines),
1216 name='compengine', fmt='%s', sep=', '))
1216 name='compengine', fmt='%s', sep=', '))
1217 fm.write('compenginesavail', _('checking available compression engines '
1217 fm.write('compenginesavail', _('checking available compression engines '
1218 '(%s)\n'),
1218 '(%s)\n'),
1219 fm.formatlist(sorted(e.name() for e in compengines
1219 fm.formatlist(sorted(e.name() for e in compengines
1220 if e.available()),
1220 if e.available()),
1221 name='compengine', fmt='%s', sep=', '))
1221 name='compengine', fmt='%s', sep=', '))
1222 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1222 wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
1223 fm.write('compenginesserver', _('checking available compression engines '
1223 fm.write('compenginesserver', _('checking available compression engines '
1224 'for wire protocol (%s)\n'),
1224 'for wire protocol (%s)\n'),
1225 fm.formatlist([e.name() for e in wirecompengines
1225 fm.formatlist([e.name() for e in wirecompengines
1226 if e.wireprotosupport()],
1226 if e.wireprotosupport()],
1227 name='compengine', fmt='%s', sep=', '))
1227 name='compengine', fmt='%s', sep=', '))
1228 re2 = 'missing'
1228 re2 = 'missing'
1229 if util._re2:
1229 if util._re2:
1230 re2 = 'available'
1230 re2 = 'available'
1231 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1231 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1232 fm.data(re2=bool(util._re2))
1232 fm.data(re2=bool(util._re2))
1233
1233
1234 # templates
1234 # templates
1235 p = templater.templatepaths()
1235 p = templater.templatepaths()
1236 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1236 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1237 fm.condwrite(not p, '', _(" no template directories found\n"))
1237 fm.condwrite(not p, '', _(" no template directories found\n"))
1238 if p:
1238 if p:
1239 m = templater.templatepath("map-cmdline.default")
1239 m = templater.templatepath("map-cmdline.default")
1240 if m:
1240 if m:
1241 # template found, check if it is working
1241 # template found, check if it is working
1242 err = None
1242 err = None
1243 try:
1243 try:
1244 templater.templater.frommapfile(m)
1244 templater.templater.frommapfile(m)
1245 except Exception as inst:
1245 except Exception as inst:
1246 err = stringutil.forcebytestr(inst)
1246 err = stringutil.forcebytestr(inst)
1247 p = None
1247 p = None
1248 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1248 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1249 else:
1249 else:
1250 p = None
1250 p = None
1251 fm.condwrite(p, 'defaulttemplate',
1251 fm.condwrite(p, 'defaulttemplate',
1252 _("checking default template (%s)\n"), m)
1252 _("checking default template (%s)\n"), m)
1253 fm.condwrite(not m, 'defaulttemplatenotfound',
1253 fm.condwrite(not m, 'defaulttemplatenotfound',
1254 _(" template '%s' not found\n"), "default")
1254 _(" template '%s' not found\n"), "default")
1255 if not p:
1255 if not p:
1256 problems += 1
1256 problems += 1
1257 fm.condwrite(not p, '',
1257 fm.condwrite(not p, '',
1258 _(" (templates seem to have been installed incorrectly)\n"))
1258 _(" (templates seem to have been installed incorrectly)\n"))
1259
1259
1260 # editor
1260 # editor
1261 editor = ui.geteditor()
1261 editor = ui.geteditor()
1262 editor = util.expandpath(editor)
1262 editor = util.expandpath(editor)
1263 editorbin = procutil.shellsplit(editor)[0]
1263 editorbin = procutil.shellsplit(editor)[0]
1264 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1264 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1265 cmdpath = procutil.findexe(editorbin)
1265 cmdpath = procutil.findexe(editorbin)
1266 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1266 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1267 _(" No commit editor set and can't find %s in PATH\n"
1267 _(" No commit editor set and can't find %s in PATH\n"
1268 " (specify a commit editor in your configuration"
1268 " (specify a commit editor in your configuration"
1269 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1269 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1270 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1270 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1271 _(" Can't find editor '%s' in PATH\n"
1271 _(" Can't find editor '%s' in PATH\n"
1272 " (specify a commit editor in your configuration"
1272 " (specify a commit editor in your configuration"
1273 " file)\n"), not cmdpath and editorbin)
1273 " file)\n"), not cmdpath and editorbin)
1274 if not cmdpath and editor != 'vi':
1274 if not cmdpath and editor != 'vi':
1275 problems += 1
1275 problems += 1
1276
1276
1277 # check username
1277 # check username
1278 username = None
1278 username = None
1279 err = None
1279 err = None
1280 try:
1280 try:
1281 username = ui.username()
1281 username = ui.username()
1282 except error.Abort as e:
1282 except error.Abort as e:
1283 err = stringutil.forcebytestr(e)
1283 err = stringutil.forcebytestr(e)
1284 problems += 1
1284 problems += 1
1285
1285
1286 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1286 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1287 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1287 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1288 " (specify a username in your configuration file)\n"), err)
1288 " (specify a username in your configuration file)\n"), err)
1289
1289
1290 fm.condwrite(not problems, '',
1290 fm.condwrite(not problems, '',
1291 _("no problems detected\n"))
1291 _("no problems detected\n"))
1292 if not problems:
1292 if not problems:
1293 fm.data(problems=problems)
1293 fm.data(problems=problems)
1294 fm.condwrite(problems, 'problems',
1294 fm.condwrite(problems, 'problems',
1295 _("%d problems detected,"
1295 _("%d problems detected,"
1296 " please check your install!\n"), problems)
1296 " please check your install!\n"), problems)
1297 fm.end()
1297 fm.end()
1298
1298
1299 return problems
1299 return problems
1300
1300
1301 @command('debugknown', [], _('REPO ID...'), norepo=True)
1301 @command('debugknown', [], _('REPO ID...'), norepo=True)
1302 def debugknown(ui, repopath, *ids, **opts):
1302 def debugknown(ui, repopath, *ids, **opts):
1303 """test whether node ids are known to a repo
1303 """test whether node ids are known to a repo
1304
1304
1305 Every ID must be a full-length hex node id string. Returns a list of 0s
1305 Every ID must be a full-length hex node id string. Returns a list of 0s
1306 and 1s indicating unknown/known.
1306 and 1s indicating unknown/known.
1307 """
1307 """
1308 opts = pycompat.byteskwargs(opts)
1308 opts = pycompat.byteskwargs(opts)
1309 repo = hg.peer(ui, opts, repopath)
1309 repo = hg.peer(ui, opts, repopath)
1310 if not repo.capable('known'):
1310 if not repo.capable('known'):
1311 raise error.Abort("known() not supported by target repository")
1311 raise error.Abort("known() not supported by target repository")
1312 flags = repo.known([bin(s) for s in ids])
1312 flags = repo.known([bin(s) for s in ids])
1313 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1313 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1314
1314
1315 @command('debuglabelcomplete', [], _('LABEL...'))
1315 @command('debuglabelcomplete', [], _('LABEL...'))
1316 def debuglabelcomplete(ui, repo, *args):
1316 def debuglabelcomplete(ui, repo, *args):
1317 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1317 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1318 debugnamecomplete(ui, repo, *args)
1318 debugnamecomplete(ui, repo, *args)
1319
1319
1320 @command('debuglocks',
1320 @command('debuglocks',
1321 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1321 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1322 ('W', 'force-wlock', None,
1322 ('W', 'force-wlock', None,
1323 _('free the working state lock (DANGEROUS)')),
1323 _('free the working state lock (DANGEROUS)')),
1324 ('s', 'set-lock', None, _('set the store lock until stopped')),
1324 ('s', 'set-lock', None, _('set the store lock until stopped')),
1325 ('S', 'set-wlock', None,
1325 ('S', 'set-wlock', None,
1326 _('set the working state lock until stopped'))],
1326 _('set the working state lock until stopped'))],
1327 _('[OPTION]...'))
1327 _('[OPTION]...'))
1328 def debuglocks(ui, repo, **opts):
1328 def debuglocks(ui, repo, **opts):
1329 """show or modify state of locks
1329 """show or modify state of locks
1330
1330
1331 By default, this command will show which locks are held. This
1331 By default, this command will show which locks are held. This
1332 includes the user and process holding the lock, the amount of time
1332 includes the user and process holding the lock, the amount of time
1333 the lock has been held, and the machine name where the process is
1333 the lock has been held, and the machine name where the process is
1334 running if it's not local.
1334 running if it's not local.
1335
1335
1336 Locks protect the integrity of Mercurial's data, so should be
1336 Locks protect the integrity of Mercurial's data, so should be
1337 treated with care. System crashes or other interruptions may cause
1337 treated with care. System crashes or other interruptions may cause
1338 locks to not be properly released, though Mercurial will usually
1338 locks to not be properly released, though Mercurial will usually
1339 detect and remove such stale locks automatically.
1339 detect and remove such stale locks automatically.
1340
1340
1341 However, detecting stale locks may not always be possible (for
1341 However, detecting stale locks may not always be possible (for
1342 instance, on a shared filesystem). Removing locks may also be
1342 instance, on a shared filesystem). Removing locks may also be
1343 blocked by filesystem permissions.
1343 blocked by filesystem permissions.
1344
1344
1345 Setting a lock will prevent other commands from changing the data.
1345 Setting a lock will prevent other commands from changing the data.
1346 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1346 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1347 The set locks are removed when the command exits.
1347 The set locks are removed when the command exits.
1348
1348
1349 Returns 0 if no locks are held.
1349 Returns 0 if no locks are held.
1350
1350
1351 """
1351 """
1352
1352
1353 if opts.get(r'force_lock'):
1353 if opts.get(r'force_lock'):
1354 repo.svfs.unlink('lock')
1354 repo.svfs.unlink('lock')
1355 if opts.get(r'force_wlock'):
1355 if opts.get(r'force_wlock'):
1356 repo.vfs.unlink('wlock')
1356 repo.vfs.unlink('wlock')
1357 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1357 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1358 return 0
1358 return 0
1359
1359
1360 locks = []
1360 locks = []
1361 try:
1361 try:
1362 if opts.get(r'set_wlock'):
1362 if opts.get(r'set_wlock'):
1363 try:
1363 try:
1364 locks.append(repo.wlock(False))
1364 locks.append(repo.wlock(False))
1365 except error.LockHeld:
1365 except error.LockHeld:
1366 raise error.Abort(_('wlock is already held'))
1366 raise error.Abort(_('wlock is already held'))
1367 if opts.get(r'set_lock'):
1367 if opts.get(r'set_lock'):
1368 try:
1368 try:
1369 locks.append(repo.lock(False))
1369 locks.append(repo.lock(False))
1370 except error.LockHeld:
1370 except error.LockHeld:
1371 raise error.Abort(_('lock is already held'))
1371 raise error.Abort(_('lock is already held'))
1372 if len(locks):
1372 if len(locks):
1373 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1373 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1374 return 0
1374 return 0
1375 finally:
1375 finally:
1376 release(*locks)
1376 release(*locks)
1377
1377
1378 now = time.time()
1378 now = time.time()
1379 held = 0
1379 held = 0
1380
1380
1381 def report(vfs, name, method):
1381 def report(vfs, name, method):
1382 # this causes stale locks to get reaped for more accurate reporting
1382 # this causes stale locks to get reaped for more accurate reporting
1383 try:
1383 try:
1384 l = method(False)
1384 l = method(False)
1385 except error.LockHeld:
1385 except error.LockHeld:
1386 l = None
1386 l = None
1387
1387
1388 if l:
1388 if l:
1389 l.release()
1389 l.release()
1390 else:
1390 else:
1391 try:
1391 try:
1392 st = vfs.lstat(name)
1392 st = vfs.lstat(name)
1393 age = now - st[stat.ST_MTIME]
1393 age = now - st[stat.ST_MTIME]
1394 user = util.username(st.st_uid)
1394 user = util.username(st.st_uid)
1395 locker = vfs.readlock(name)
1395 locker = vfs.readlock(name)
1396 if ":" in locker:
1396 if ":" in locker:
1397 host, pid = locker.split(':')
1397 host, pid = locker.split(':')
1398 if host == socket.gethostname():
1398 if host == socket.gethostname():
1399 locker = 'user %s, process %s' % (user, pid)
1399 locker = 'user %s, process %s' % (user, pid)
1400 else:
1400 else:
1401 locker = 'user %s, process %s, host %s' \
1401 locker = 'user %s, process %s, host %s' \
1402 % (user, pid, host)
1402 % (user, pid, host)
1403 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1403 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1404 return 1
1404 return 1
1405 except OSError as e:
1405 except OSError as e:
1406 if e.errno != errno.ENOENT:
1406 if e.errno != errno.ENOENT:
1407 raise
1407 raise
1408
1408
1409 ui.write(("%-6s free\n") % (name + ":"))
1409 ui.write(("%-6s free\n") % (name + ":"))
1410 return 0
1410 return 0
1411
1411
1412 held += report(repo.svfs, "lock", repo.lock)
1412 held += report(repo.svfs, "lock", repo.lock)
1413 held += report(repo.vfs, "wlock", repo.wlock)
1413 held += report(repo.vfs, "wlock", repo.wlock)
1414
1414
1415 return held
1415 return held
1416
1416
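The report above decodes the lock file's ``host:pid`` payload and drops the host when it matches the local machine. A self-contained sketch with made-up values::

    import socket

    locker = 'buildhost.example.com:4242'   # hypothetical lock file contents
    user = 'alice'                          # hypothetical lock owner
    host, pid = locker.split(':')
    if host == socket.gethostname():
        print('user %s, process %s' % (user, pid))
    else:
        print('user %s, process %s, host %s' % (user, pid, host))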
1417 @command('debugmergestate', [], '')
1417 @command('debugmergestate', [], '')
1418 def debugmergestate(ui, repo, *args):
1418 def debugmergestate(ui, repo, *args):
1419 """print merge state
1419 """print merge state
1420
1420
1421 Use --verbose to print out information about whether v1 or v2 merge state
1421 Use --verbose to print out information about whether v1 or v2 merge state
1422 was chosen."""
1422 was chosen."""
1423 def _hashornull(h):
1423 def _hashornull(h):
1424 if h == nullhex:
1424 if h == nullhex:
1425 return 'null'
1425 return 'null'
1426 else:
1426 else:
1427 return h
1427 return h
1428
1428
1429 def printrecords(version):
1429 def printrecords(version):
1430 ui.write(('* version %d records\n') % version)
1430 ui.write(('* version %d records\n') % version)
1431 if version == 1:
1431 if version == 1:
1432 records = v1records
1432 records = v1records
1433 else:
1433 else:
1434 records = v2records
1434 records = v2records
1435
1435
1436 for rtype, record in records:
1436 for rtype, record in records:
1437 # pretty print some record types
1437 # pretty print some record types
1438 if rtype == 'L':
1438 if rtype == 'L':
1439 ui.write(('local: %s\n') % record)
1439 ui.write(('local: %s\n') % record)
1440 elif rtype == 'O':
1440 elif rtype == 'O':
1441 ui.write(('other: %s\n') % record)
1441 ui.write(('other: %s\n') % record)
1442 elif rtype == 'm':
1442 elif rtype == 'm':
1443 driver, mdstate = record.split('\0', 1)
1443 driver, mdstate = record.split('\0', 1)
1444 ui.write(('merge driver: %s (state "%s")\n')
1444 ui.write(('merge driver: %s (state "%s")\n')
1445 % (driver, mdstate))
1445 % (driver, mdstate))
1446 elif rtype in 'FDC':
1446 elif rtype in 'FDC':
1447 r = record.split('\0')
1447 r = record.split('\0')
1448 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1448 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1449 if version == 1:
1449 if version == 1:
1450 onode = 'not stored in v1 format'
1450 onode = 'not stored in v1 format'
1451 flags = r[7]
1451 flags = r[7]
1452 else:
1452 else:
1453 onode, flags = r[7:9]
1453 onode, flags = r[7:9]
1454 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1454 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1455 % (f, rtype, state, _hashornull(hash)))
1455 % (f, rtype, state, _hashornull(hash)))
1456 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1456 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1457 ui.write((' ancestor path: %s (node %s)\n')
1457 ui.write((' ancestor path: %s (node %s)\n')
1458 % (afile, _hashornull(anode)))
1458 % (afile, _hashornull(anode)))
1459 ui.write((' other path: %s (node %s)\n')
1459 ui.write((' other path: %s (node %s)\n')
1460 % (ofile, _hashornull(onode)))
1460 % (ofile, _hashornull(onode)))
1461 elif rtype == 'f':
1461 elif rtype == 'f':
1462 filename, rawextras = record.split('\0', 1)
1462 filename, rawextras = record.split('\0', 1)
1463 extras = rawextras.split('\0')
1463 extras = rawextras.split('\0')
1464 i = 0
1464 i = 0
1465 extrastrings = []
1465 extrastrings = []
1466 while i < len(extras):
1466 while i < len(extras):
1467 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1467 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1468 i += 2
1468 i += 2
1469
1469
1470 ui.write(('file extras: %s (%s)\n')
1470 ui.write(('file extras: %s (%s)\n')
1471 % (filename, ', '.join(extrastrings)))
1471 % (filename, ', '.join(extrastrings)))
1472 elif rtype == 'l':
1472 elif rtype == 'l':
1473 labels = record.split('\0', 2)
1473 labels = record.split('\0', 2)
1474 labels = [l for l in labels if len(l) > 0]
1474 labels = [l for l in labels if len(l) > 0]
1475 ui.write(('labels:\n'))
1475 ui.write(('labels:\n'))
1476 ui.write((' local: %s\n' % labels[0]))
1476 ui.write((' local: %s\n' % labels[0]))
1477 ui.write((' other: %s\n' % labels[1]))
1477 ui.write((' other: %s\n' % labels[1]))
1478 if len(labels) > 2:
1478 if len(labels) > 2:
1479 ui.write((' base: %s\n' % labels[2]))
1479 ui.write((' base: %s\n' % labels[2]))
1480 else:
1480 else:
1481 ui.write(('unrecognized entry: %s\t%s\n')
1481 ui.write(('unrecognized entry: %s\t%s\n')
1482 % (rtype, record.replace('\0', '\t')))
1482 % (rtype, record.replace('\0', '\t')))
1483
1483
1484 # Avoid mergestate.read() since it may raise an exception for unsupported
1484 # Avoid mergestate.read() since it may raise an exception for unsupported
1485 # merge state records. We shouldn't be doing this, but this is OK since this
1485 # merge state records. We shouldn't be doing this, but this is OK since this
1486 # command is pretty low-level.
1486 # command is pretty low-level.
1487 ms = mergemod.mergestate(repo)
1487 ms = mergemod.mergestate(repo)
1488
1488
1489 # sort so that reasonable information is on top
1489 # sort so that reasonable information is on top
1490 v1records = ms._readrecordsv1()
1490 v1records = ms._readrecordsv1()
1491 v2records = ms._readrecordsv2()
1491 v2records = ms._readrecordsv2()
1492 order = 'LOml'
1492 order = 'LOml'
1493 def key(r):
1493 def key(r):
1494 idx = order.find(r[0])
1494 idx = order.find(r[0])
1495 if idx == -1:
1495 if idx == -1:
1496 return (1, r[1])
1496 return (1, r[1])
1497 else:
1497 else:
1498 return (0, idx)
1498 return (0, idx)
1499 v1records.sort(key=key)
1499 v1records.sort(key=key)
1500 v2records.sort(key=key)
1500 v2records.sort(key=key)
1501
1501
1502 if not v1records and not v2records:
1502 if not v1records and not v2records:
1503 ui.write(('no merge state found\n'))
1503 ui.write(('no merge state found\n'))
1504 elif not v2records:
1504 elif not v2records:
1505 ui.note(('no version 2 merge state\n'))
1505 ui.note(('no version 2 merge state\n'))
1506 printrecords(1)
1506 printrecords(1)
1507 elif ms._v1v2match(v1records, v2records):
1507 elif ms._v1v2match(v1records, v2records):
1508 ui.note(('v1 and v2 states match: using v2\n'))
1508 ui.note(('v1 and v2 states match: using v2\n'))
1509 printrecords(2)
1509 printrecords(2)
1510 else:
1510 else:
1511 ui.note(('v1 and v2 states mismatch: using v1\n'))
1511 ui.note(('v1 and v2 states mismatch: using v1\n'))
1512 printrecords(1)
1512 printrecords(1)
1513 if ui.verbose:
1513 if ui.verbose:
1514 printrecords(2)
1514 printrecords(2)
1515
1515
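The ``key()`` helper above pins the well-known record types to the front in the fixed order ``L``, ``O``, ``m``, ``l``; anything else sorts after them by payload. A small sketch with hypothetical records::

    order = 'LOml'

    def key(r):
        idx = order.find(r[0])
        return (1, r[1]) if idx == -1 else (0, idx)

    records = [('F', 'some-file-record'), ('O', 'other-node'), ('L', 'local-node')]
    print(sorted(records, key=key))   # L first, then O, then the 'F' record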
1516 @command('debugnamecomplete', [], _('NAME...'))
1516 @command('debugnamecomplete', [], _('NAME...'))
1517 def debugnamecomplete(ui, repo, *args):
1517 def debugnamecomplete(ui, repo, *args):
1518 '''complete "names" - tags, open branch names, bookmark names'''
1518 '''complete "names" - tags, open branch names, bookmark names'''
1519
1519
1520 names = set()
1520 names = set()
1521 # since we previously only listed open branches, we will handle that
1521 # since we previously only listed open branches, we will handle that
1522 # specially (after this for loop)
1522 # specially (after this for loop)
1523 for name, ns in repo.names.iteritems():
1523 for name, ns in repo.names.iteritems():
1524 if name != 'branches':
1524 if name != 'branches':
1525 names.update(ns.listnames(repo))
1525 names.update(ns.listnames(repo))
1526 names.update(tag for (tag, heads, tip, closed)
1526 names.update(tag for (tag, heads, tip, closed)
1527 in repo.branchmap().iterbranches() if not closed)
1527 in repo.branchmap().iterbranches() if not closed)
1528 completions = set()
1528 completions = set()
1529 if not args:
1529 if not args:
1530 args = ['']
1530 args = ['']
1531 for a in args:
1531 for a in args:
1532 completions.update(n for n in names if n.startswith(a))
1532 completions.update(n for n in names if n.startswith(a))
1533 ui.write('\n'.join(sorted(completions)))
1533 ui.write('\n'.join(sorted(completions)))
1534 ui.write('\n')
1534 ui.write('\n')
1535
1535
1536 @command('debugobsolete',
1536 @command('debugobsolete',
1537 [('', 'flags', 0, _('markers flag')),
1537 [('', 'flags', 0, _('markers flag')),
1538 ('', 'record-parents', False,
1538 ('', 'record-parents', False,
1539 _('record parent information for the precursor')),
1539 _('record parent information for the precursor')),
1540 ('r', 'rev', [], _('display markers relevant to REV')),
1540 ('r', 'rev', [], _('display markers relevant to REV')),
1541 ('', 'exclusive', False, _('restrict display to markers only '
1541 ('', 'exclusive', False, _('restrict display to markers only '
1542 'relevant to REV')),
1542 'relevant to REV')),
1543 ('', 'index', False, _('display index of the marker')),
1543 ('', 'index', False, _('display index of the marker')),
1544 ('', 'delete', [], _('delete markers specified by indices')),
1544 ('', 'delete', [], _('delete markers specified by indices')),
1545 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1545 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1546 _('[OBSOLETED [REPLACEMENT ...]]'))
1546 _('[OBSOLETED [REPLACEMENT ...]]'))
1547 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1547 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1548 """create arbitrary obsolete marker
1548 """create arbitrary obsolete marker
1549
1549
1550 With no arguments, displays the list of obsolescence markers."""
1550 With no arguments, displays the list of obsolescence markers."""
1551
1551
1552 opts = pycompat.byteskwargs(opts)
1552 opts = pycompat.byteskwargs(opts)
1553
1553
1554 def parsenodeid(s):
1554 def parsenodeid(s):
1555 try:
1555 try:
1556 # We do not use revsingle/revrange functions here to accept
1556 # We do not use revsingle/revrange functions here to accept
1557 # arbitrary node identifiers, possibly not present in the
1557 # arbitrary node identifiers, possibly not present in the
1558 # local repository.
1558 # local repository.
1559 n = bin(s)
1559 n = bin(s)
1560 if len(n) != len(nullid):
1560 if len(n) != len(nullid):
1561 raise TypeError()
1561 raise TypeError()
1562 return n
1562 return n
1563 except TypeError:
1563 except TypeError:
1564 raise error.Abort('changeset references must be full hexadecimal '
1564 raise error.Abort('changeset references must be full hexadecimal '
1565 'node identifiers')
1565 'node identifiers')
1566
1566
1567 if opts.get('delete'):
1567 if opts.get('delete'):
1568 indices = []
1568 indices = []
1569 for v in opts.get('delete'):
1569 for v in opts.get('delete'):
1570 try:
1570 try:
1571 indices.append(int(v))
1571 indices.append(int(v))
1572 except ValueError:
1572 except ValueError:
1573 raise error.Abort(_('invalid index value: %r') % v,
1573 raise error.Abort(_('invalid index value: %r') % v,
1574 hint=_('use integers for indices'))
1574 hint=_('use integers for indices'))
1575
1575
1576 if repo.currenttransaction():
1576 if repo.currenttransaction():
1577 raise error.Abort(_('cannot delete obsmarkers in the middle '
1577 raise error.Abort(_('cannot delete obsmarkers in the middle '
1578 'of transaction.'))
1578 'of transaction.'))
1579
1579
1580 with repo.lock():
1580 with repo.lock():
1581 n = repair.deleteobsmarkers(repo.obsstore, indices)
1581 n = repair.deleteobsmarkers(repo.obsstore, indices)
1582 ui.write(_('deleted %i obsolescence markers\n') % n)
1582 ui.write(_('deleted %i obsolescence markers\n') % n)
1583
1583
1584 return
1584 return
1585
1585
1586 if precursor is not None:
1586 if precursor is not None:
1587 if opts['rev']:
1587 if opts['rev']:
1588 raise error.Abort('cannot select revision when creating marker')
1588 raise error.Abort('cannot select revision when creating marker')
1589 metadata = {}
1589 metadata = {}
1590 metadata['user'] = opts['user'] or ui.username()
1590 metadata['user'] = opts['user'] or ui.username()
1591 succs = tuple(parsenodeid(succ) for succ in successors)
1591 succs = tuple(parsenodeid(succ) for succ in successors)
1592 l = repo.lock()
1592 l = repo.lock()
1593 try:
1593 try:
1594 tr = repo.transaction('debugobsolete')
1594 tr = repo.transaction('debugobsolete')
1595 try:
1595 try:
1596 date = opts.get('date')
1596 date = opts.get('date')
1597 if date:
1597 if date:
1598 date = dateutil.parsedate(date)
1598 date = dateutil.parsedate(date)
1599 else:
1599 else:
1600 date = None
1600 date = None
1601 prec = parsenodeid(precursor)
1601 prec = parsenodeid(precursor)
1602 parents = None
1602 parents = None
1603 if opts['record_parents']:
1603 if opts['record_parents']:
1604 if prec not in repo.unfiltered():
1604 if prec not in repo.unfiltered():
1605 raise error.Abort('cannot use --record-parents on '
1605 raise error.Abort('cannot use --record-parents on '
1606 'unknown changesets')
1606 'unknown changesets')
1607 parents = repo.unfiltered()[prec].parents()
1607 parents = repo.unfiltered()[prec].parents()
1608 parents = tuple(p.node() for p in parents)
1608 parents = tuple(p.node() for p in parents)
1609 repo.obsstore.create(tr, prec, succs, opts['flags'],
1609 repo.obsstore.create(tr, prec, succs, opts['flags'],
1610 parents=parents, date=date,
1610 parents=parents, date=date,
1611 metadata=metadata, ui=ui)
1611 metadata=metadata, ui=ui)
1612 tr.close()
1612 tr.close()
1613 except ValueError as exc:
1613 except ValueError as exc:
1614 raise error.Abort(_('bad obsmarker input: %s') %
1614 raise error.Abort(_('bad obsmarker input: %s') %
1615 pycompat.bytestr(exc))
1615 pycompat.bytestr(exc))
1616 finally:
1616 finally:
1617 tr.release()
1617 tr.release()
1618 finally:
1618 finally:
1619 l.release()
1619 l.release()
1620 else:
1620 else:
1621 if opts['rev']:
1621 if opts['rev']:
1622 revs = scmutil.revrange(repo, opts['rev'])
1622 revs = scmutil.revrange(repo, opts['rev'])
1623 nodes = [repo[r].node() for r in revs]
1623 nodes = [repo[r].node() for r in revs]
1624 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1624 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1625 exclusive=opts['exclusive']))
1625 exclusive=opts['exclusive']))
1626 markers.sort(key=lambda x: x._data)
1626 markers.sort(key=lambda x: x._data)
1627 else:
1627 else:
1628 markers = obsutil.getmarkers(repo)
1628 markers = obsutil.getmarkers(repo)
1629
1629
1630 markerstoiter = markers
1630 markerstoiter = markers
1631 isrelevant = lambda m: True
1631 isrelevant = lambda m: True
1632 if opts.get('rev') and opts.get('index'):
1632 if opts.get('rev') and opts.get('index'):
1633 markerstoiter = obsutil.getmarkers(repo)
1633 markerstoiter = obsutil.getmarkers(repo)
1634 markerset = set(markers)
1634 markerset = set(markers)
1635 isrelevant = lambda m: m in markerset
1635 isrelevant = lambda m: m in markerset
1636
1636
1637 fm = ui.formatter('debugobsolete', opts)
1637 fm = ui.formatter('debugobsolete', opts)
1638 for i, m in enumerate(markerstoiter):
1638 for i, m in enumerate(markerstoiter):
1639 if not isrelevant(m):
1639 if not isrelevant(m):
1640 # marker can be irrelevant when we're iterating over a set
1640 # marker can be irrelevant when we're iterating over a set
1641 # of markers (markerstoiter) which is bigger than the set
1641 # of markers (markerstoiter) which is bigger than the set
1642 # of markers we want to display (markers)
1642 # of markers we want to display (markers)
1643 # this can happen if both --index and --rev options are
1643 # this can happen if both --index and --rev options are
1644 # provided and thus we need to iterate over all of the markers
1644 # provided and thus we need to iterate over all of the markers
1645 # to get the correct indices, but only display the ones that
1645 # to get the correct indices, but only display the ones that
1646 # are relevant to --rev value
1646 # are relevant to --rev value
1647 continue
1647 continue
1648 fm.startitem()
1648 fm.startitem()
1649 ind = i if opts.get('index') else None
1649 ind = i if opts.get('index') else None
1650 cmdutil.showmarker(fm, m, index=ind)
1650 cmdutil.showmarker(fm, m, index=ind)
1651 fm.end()
1651 fm.end()
1652
1652
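A sketch of the ``parsenodeid()`` validation above: a reference must be 40 hex digits (20 bytes once decoded) or the command aborts. ``binascii`` and ``ValueError`` stand in here for ``node.bin`` and ``error.Abort``::

    import binascii

    def parsenodeid(s):
        try:
            n = binascii.unhexlify(s)
            if len(n) != 20:               # len(nullid) in the real code
                raise TypeError()
            return n
        except (TypeError, binascii.Error):
            raise ValueError('changeset references must be full hexadecimal '
                             'node identifiers')

    print(len(parsenodeid('11' * 20)))     # -> 20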
1653 @command('debugpathcomplete',
1653 @command('debugpathcomplete',
1654 [('f', 'full', None, _('complete an entire path')),
1654 [('f', 'full', None, _('complete an entire path')),
1655 ('n', 'normal', None, _('show only normal files')),
1655 ('n', 'normal', None, _('show only normal files')),
1656 ('a', 'added', None, _('show only added files')),
1656 ('a', 'added', None, _('show only added files')),
1657 ('r', 'removed', None, _('show only removed files'))],
1657 ('r', 'removed', None, _('show only removed files'))],
1658 _('FILESPEC...'))
1658 _('FILESPEC...'))
1659 def debugpathcomplete(ui, repo, *specs, **opts):
1659 def debugpathcomplete(ui, repo, *specs, **opts):
1660 '''complete part or all of a tracked path
1660 '''complete part or all of a tracked path
1661
1661
1662 This command supports shells that offer path name completion. It
1662 This command supports shells that offer path name completion. It
1663 currently completes only files already known to the dirstate.
1663 currently completes only files already known to the dirstate.
1664
1664
1665 Completion extends only to the next path segment unless
1665 Completion extends only to the next path segment unless
1666 --full is specified, in which case entire paths are used.'''
1666 --full is specified, in which case entire paths are used.'''
1667
1667
1668 def complete(path, acceptable):
1668 def complete(path, acceptable):
1669 dirstate = repo.dirstate
1669 dirstate = repo.dirstate
1670 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1670 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
1671 rootdir = repo.root + pycompat.ossep
1671 rootdir = repo.root + pycompat.ossep
1672 if spec != repo.root and not spec.startswith(rootdir):
1672 if spec != repo.root and not spec.startswith(rootdir):
1673 return [], []
1673 return [], []
1674 if os.path.isdir(spec):
1674 if os.path.isdir(spec):
1675 spec += '/'
1675 spec += '/'
1676 spec = spec[len(rootdir):]
1676 spec = spec[len(rootdir):]
1677 fixpaths = pycompat.ossep != '/'
1677 fixpaths = pycompat.ossep != '/'
1678 if fixpaths:
1678 if fixpaths:
1679 spec = spec.replace(pycompat.ossep, '/')
1679 spec = spec.replace(pycompat.ossep, '/')
1680 speclen = len(spec)
1680 speclen = len(spec)
1681 fullpaths = opts[r'full']
1681 fullpaths = opts[r'full']
1682 files, dirs = set(), set()
1682 files, dirs = set(), set()
1683 adddir, addfile = dirs.add, files.add
1683 adddir, addfile = dirs.add, files.add
1684 for f, st in dirstate.iteritems():
1684 for f, st in dirstate.iteritems():
1685 if f.startswith(spec) and st[0] in acceptable:
1685 if f.startswith(spec) and st[0] in acceptable:
1686 if fixpaths:
1686 if fixpaths:
1687 f = f.replace('/', pycompat.ossep)
1687 f = f.replace('/', pycompat.ossep)
1688 if fullpaths:
1688 if fullpaths:
1689 addfile(f)
1689 addfile(f)
1690 continue
1690 continue
1691 s = f.find(pycompat.ossep, speclen)
1691 s = f.find(pycompat.ossep, speclen)
1692 if s >= 0:
1692 if s >= 0:
1693 adddir(f[:s])
1693 adddir(f[:s])
1694 else:
1694 else:
1695 addfile(f)
1695 addfile(f)
1696 return files, dirs
1696 return files, dirs
1697
1697
1698 acceptable = ''
1698 acceptable = ''
1699 if opts[r'normal']:
1699 if opts[r'normal']:
1700 acceptable += 'nm'
1700 acceptable += 'nm'
1701 if opts[r'added']:
1701 if opts[r'added']:
1702 acceptable += 'a'
1702 acceptable += 'a'
1703 if opts[r'removed']:
1703 if opts[r'removed']:
1704 acceptable += 'r'
1704 acceptable += 'r'
1705 cwd = repo.getcwd()
1705 cwd = repo.getcwd()
1706 if not specs:
1706 if not specs:
1707 specs = ['.']
1707 specs = ['.']
1708
1708
1709 files, dirs = set(), set()
1709 files, dirs = set(), set()
1710 for spec in specs:
1710 for spec in specs:
1711 f, d = complete(spec, acceptable or 'nmar')
1711 f, d = complete(spec, acceptable or 'nmar')
1712 files.update(f)
1712 files.update(f)
1713 dirs.update(d)
1713 dirs.update(d)
1714 files.update(dirs)
1714 files.update(dirs)
1715 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1715 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1716 ui.write('\n')
1716 ui.write('\n')
1717
1717
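The completion rule above, in isolation: unless ``--full`` is given, a matching path is cut at the first separator past the typed prefix, so only one directory level is offered at a time. A sketch with a hypothetical dirstate::

    tracked = ['src/main.py', 'src/util/io.py', 'src/util/net.py', 'README']
    spec = 'src/'
    out = set()
    for f in tracked:
        if f.startswith(spec):
            s = f.find('/', len(spec))
            out.add(f[:s] if s >= 0 else f)
    print(sorted(out))   # -> ['src/main.py', 'src/util']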
1718 @command('debugpeer', [], _('PATH'), norepo=True)
1718 @command('debugpeer', [], _('PATH'), norepo=True)
1719 def debugpeer(ui, path):
1719 def debugpeer(ui, path):
1720 """establish a connection to a peer repository"""
1720 """establish a connection to a peer repository"""
1721 # Always enable peer request logging. Requires --debug to display
1721 # Always enable peer request logging. Requires --debug to display
1722 # though.
1722 # though.
1723 overrides = {
1723 overrides = {
1724 ('devel', 'debug.peer-request'): True,
1724 ('devel', 'debug.peer-request'): True,
1725 }
1725 }
1726
1726
1727 with ui.configoverride(overrides):
1727 with ui.configoverride(overrides):
1728 peer = hg.peer(ui, {}, path)
1728 peer = hg.peer(ui, {}, path)
1729
1729
1730 local = peer.local() is not None
1730 local = peer.local() is not None
1731 canpush = peer.canpush()
1731 canpush = peer.canpush()
1732
1732
1733 ui.write(_('url: %s\n') % peer.url())
1733 ui.write(_('url: %s\n') % peer.url())
1734 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1734 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1735 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1735 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1736
1736
1737 @command('debugpickmergetool',
1737 @command('debugpickmergetool',
1738 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1738 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1739 ('', 'changedelete', None, _('emulate merging change and delete')),
1739 ('', 'changedelete', None, _('emulate merging change and delete')),
1740 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1740 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1741 _('[PATTERN]...'),
1741 _('[PATTERN]...'),
1742 inferrepo=True)
1742 inferrepo=True)
1743 def debugpickmergetool(ui, repo, *pats, **opts):
1743 def debugpickmergetool(ui, repo, *pats, **opts):
1744 """examine which merge tool is chosen for specified file
1744 """examine which merge tool is chosen for specified file
1745
1745
1746 As described in :hg:`help merge-tools`, Mercurial examines
1746 As described in :hg:`help merge-tools`, Mercurial examines
1747 configurations below in this order to decide which merge tool is
1747 configurations below in this order to decide which merge tool is
1748 chosen for specified file.
1748 chosen for specified file.
1749
1749
1750 1. ``--tool`` option
1750 1. ``--tool`` option
1751 2. ``HGMERGE`` environment variable
1751 2. ``HGMERGE`` environment variable
1752 3. configurations in ``merge-patterns`` section
1752 3. configurations in ``merge-patterns`` section
1753 4. configuration of ``ui.merge``
1753 4. configuration of ``ui.merge``
1754 5. configurations in ``merge-tools`` section
1754 5. configurations in ``merge-tools`` section
1755 6. ``hgmerge`` tool (for historical reason only)
1755 6. ``hgmerge`` tool (for historical reason only)
1756 7. default tool for fallback (``:merge`` or ``:prompt``)
1756 7. default tool for fallback (``:merge`` or ``:prompt``)
1757
1757
1758 This command writes out examination result in the style below::
1758 This command writes out examination result in the style below::
1759
1759
1760 FILE = MERGETOOL
1760 FILE = MERGETOOL
1761
1761
1762 By default, all files known in the first parent context of the
1762 By default, all files known in the first parent context of the
1763 working directory are examined. Use file patterns and/or -I/-X
1763 working directory are examined. Use file patterns and/or -I/-X
1764 options to limit target files. -r/--rev is also useful to examine
1764 options to limit target files. -r/--rev is also useful to examine
1765 files in another context without actual updating to it.
1765 files in another context without actual updating to it.
1766
1766
1767 With --debug, this command shows warning messages while matching
1767 With --debug, this command shows warning messages while matching
1768 against ``merge-patterns`` and so on, too. It is recommended to
1768 against ``merge-patterns`` and so on, too. It is recommended to
1769 use this option with explicit file patterns and/or -I/-X options,
1769 use this option with explicit file patterns and/or -I/-X options,
1770 because this option increases amount of output per file according
1770 because this option increases amount of output per file according
1771 to configurations in hgrc.
1771 to configurations in hgrc.
1772
1772
1773 With -v/--verbose, this command shows configurations below at
1773 With -v/--verbose, this command shows configurations below at
1774 first (only if specified).
1774 first (only if specified).
1775
1775
1776 - ``--tool`` option
1776 - ``--tool`` option
1777 - ``HGMERGE`` environment variable
1777 - ``HGMERGE`` environment variable
1778 - configuration of ``ui.merge``
1778 - configuration of ``ui.merge``
1779
1779
1780 If merge tool is chosen before matching against
1780 If merge tool is chosen before matching against
1781 ``merge-patterns``, this command can't show any helpful
1781 ``merge-patterns``, this command can't show any helpful
1782 information, even with --debug. In such case, information above is
1782 information, even with --debug. In such case, information above is
1783 useful to know why a merge tool is chosen.
1783 useful to know why a merge tool is chosen.
1784 """
1784 """
1785 opts = pycompat.byteskwargs(opts)
1785 opts = pycompat.byteskwargs(opts)
1786 overrides = {}
1786 overrides = {}
1787 if opts['tool']:
1787 if opts['tool']:
1788 overrides[('ui', 'forcemerge')] = opts['tool']
1788 overrides[('ui', 'forcemerge')] = opts['tool']
1789 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1789 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1790
1790
1791 with ui.configoverride(overrides, 'debugmergepatterns'):
1791 with ui.configoverride(overrides, 'debugmergepatterns'):
1792 hgmerge = encoding.environ.get("HGMERGE")
1792 hgmerge = encoding.environ.get("HGMERGE")
1793 if hgmerge is not None:
1793 if hgmerge is not None:
1794 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1794 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1795 uimerge = ui.config("ui", "merge")
1795 uimerge = ui.config("ui", "merge")
1796 if uimerge:
1796 if uimerge:
1797 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1797 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1798
1798
1799 ctx = scmutil.revsingle(repo, opts.get('rev'))
1799 ctx = scmutil.revsingle(repo, opts.get('rev'))
1800 m = scmutil.match(ctx, pats, opts)
1800 m = scmutil.match(ctx, pats, opts)
1801 changedelete = opts['changedelete']
1801 changedelete = opts['changedelete']
1802 for path in ctx.walk(m):
1802 for path in ctx.walk(m):
1803 fctx = ctx[path]
1803 fctx = ctx[path]
1804 try:
1804 try:
1805 if not ui.debugflag:
1805 if not ui.debugflag:
1806 ui.pushbuffer(error=True)
1806 ui.pushbuffer(error=True)
1807 tool, toolpath = filemerge._picktool(repo, ui, path,
1807 tool, toolpath = filemerge._picktool(repo, ui, path,
1808 fctx.isbinary(),
1808 fctx.isbinary(),
1809 'l' in fctx.flags(),
1809 'l' in fctx.flags(),
1810 changedelete)
1810 changedelete)
1811 finally:
1811 finally:
1812 if not ui.debugflag:
1812 if not ui.debugflag:
1813 ui.popbuffer()
1813 ui.popbuffer()
1814 ui.write(('%s = %s\n') % (path, tool))
1814 ui.write(('%s = %s\n') % (path, tool))
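# Illustrative sketch (the file name is made up, not from the source): for a
# file with no matching pattern or explicitly configured tool, a run like
# ``hg debugpickmergetool -r . foo.txt`` would print the chosen tool in the
# FILE = MERGETOOL style described in the docstring, e.g.::
#
#   foo.txt = :merge
#
# ``:merge`` and ``:prompt`` are the fallback tools mentioned above; which
# one appears depends on the file contents and the local configuration.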
1815
1815
1816 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1816 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1817 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1817 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it is currently set to old.
    Reports success or failure.
    '''
1825
1825
1826 target = hg.peer(ui, {}, repopath)
1826 target = hg.peer(ui, {}, repopath)
1827 if keyinfo:
1827 if keyinfo:
1828 key, old, new = keyinfo
1828 key, old, new = keyinfo
1829 with target.commandexecutor() as e:
1829 with target.commandexecutor() as e:
1830 r = e.callcommand('pushkey', {
1830 r = e.callcommand('pushkey', {
1831 'namespace': namespace,
1831 'namespace': namespace,
1832 'key': key,
1832 'key': key,
1833 'old': old,
1833 'old': old,
1834 'new': new,
1834 'new': new,
1835 }).result()
1835 }).result()
1836
1836
1837 ui.status(pycompat.bytestr(r) + '\n')
1837 ui.status(pycompat.bytestr(r) + '\n')
1838 return not r
1838 return not r
1839 else:
1839 else:
1840 for k, v in sorted(target.listkeys(namespace).iteritems()):
1840 for k, v in sorted(target.listkeys(namespace).iteritems()):
1841 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1841 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
1842 stringutil.escapestr(v)))
1842 stringutil.escapestr(v)))
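# Illustrative examples (the repository path and values are placeholders)::
#
#   hg debugpushkey /path/to/repo namespaces
#   hg debugpushkey /path/to/repo bookmarks
#   hg debugpushkey /path/to/repo bookmarks mybook '' NEWNODE
#
# The two-argument forms print one escaped ``key<TAB>value`` line per entry;
# the five-argument form prints the boolean result and, per the
# ``return not r`` above, exits 0 only when the update succeeded.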
1843
1843
1844 @command('debugpvec', [], _('A B'))
1844 @command('debugpvec', [], _('A B'))
1845 def debugpvec(ui, repo, a, b=None):
1845 def debugpvec(ui, repo, a, b=None):
1846 ca = scmutil.revsingle(repo, a)
1846 ca = scmutil.revsingle(repo, a)
1847 cb = scmutil.revsingle(repo, b)
1847 cb = scmutil.revsingle(repo, b)
1848 pa = pvec.ctxpvec(ca)
1848 pa = pvec.ctxpvec(ca)
1849 pb = pvec.ctxpvec(cb)
1849 pb = pvec.ctxpvec(cb)
1850 if pa == pb:
1850 if pa == pb:
1851 rel = "="
1851 rel = "="
1852 elif pa > pb:
1852 elif pa > pb:
1853 rel = ">"
1853 rel = ">"
1854 elif pa < pb:
1854 elif pa < pb:
1855 rel = "<"
1855 rel = "<"
1856 elif pa | pb:
1856 elif pa | pb:
1857 rel = "|"
1857 rel = "|"
1858 ui.write(_("a: %s\n") % pa)
1858 ui.write(_("a: %s\n") % pa)
1859 ui.write(_("b: %s\n") % pb)
1859 ui.write(_("b: %s\n") % pb)
1860 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1860 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
1861 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1861 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
1862 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1862 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
1863 pa.distance(pb), rel))
1863 pa.distance(pb), rel))
1864
1864
1865 @command('debugrebuilddirstate|debugrebuildstate',
1865 @command('debugrebuilddirstate|debugrebuildstate',
1866 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1866 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
1867 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1867 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
1868 'the working copy parent')),
1868 'the working copy parent')),
1869 ],
1869 ],
1870 _('[-r REV]'))
1870 _('[-r REV]'))
1871 def debugrebuilddirstate(ui, repo, rev, **opts):
1871 def debugrebuilddirstate(ui, repo, rev, **opts):
1872 """rebuild the dirstate as it would look like for the given revision
1872 """rebuild the dirstate as it would look like for the given revision
1873
1873
1874 If no revision is specified the first current parent will be used.
1874 If no revision is specified the first current parent will be used.
1875
1875
1876 The dirstate will be set to the files of the given revision.
1876 The dirstate will be set to the files of the given revision.
1877 The actual working directory content or existing dirstate
1877 The actual working directory content or existing dirstate
1878 information such as adds or removes is not considered.
1878 information such as adds or removes is not considered.
1879
1879
1880 ``minimal`` will only rebuild the dirstate status for files that claim to be
1880 ``minimal`` will only rebuild the dirstate status for files that claim to be
1881 tracked but are not in the parent manifest, or that exist in the parent
1881 tracked but are not in the parent manifest, or that exist in the parent
1882 manifest but are not in the dirstate. It will not change adds, removes, or
1882 manifest but are not in the dirstate. It will not change adds, removes, or
1883 modified files that are in the working copy parent.
1883 modified files that are in the working copy parent.
1884
1884
1885 One use of this command is to make the next :hg:`status` invocation
1885 One use of this command is to make the next :hg:`status` invocation
1886 check the actual file content.
1886 check the actual file content.
1887 """
1887 """
1888 ctx = scmutil.revsingle(repo, rev)
1888 ctx = scmutil.revsingle(repo, rev)
1889 with repo.wlock():
1889 with repo.wlock():
1890 dirstate = repo.dirstate
1890 dirstate = repo.dirstate
1891 changedfiles = None
1891 changedfiles = None
1892 # See command doc for what minimal does.
1892 # See command doc for what minimal does.
1893 if opts.get(r'minimal'):
1893 if opts.get(r'minimal'):
1894 manifestfiles = set(ctx.manifest().keys())
1894 manifestfiles = set(ctx.manifest().keys())
1895 dirstatefiles = set(dirstate)
1895 dirstatefiles = set(dirstate)
1896 manifestonly = manifestfiles - dirstatefiles
1896 manifestonly = manifestfiles - dirstatefiles
1897 dsonly = dirstatefiles - manifestfiles
1897 dsonly = dirstatefiles - manifestfiles
1898 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1898 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
1899 changedfiles = manifestonly | dsnotadded
1899 changedfiles = manifestonly | dsnotadded
1900
1900
1901 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1901 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
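# Illustrative usage (a sketch, not from the source): a typical sequence is
# to rebuild and then let status re-examine the file content::
#
#   hg debugrebuilddirstate --minimal
#   hg status
#
# The plain form resets the dirstate to the chosen revision's manifest; the
# --minimal form limits the reset to the inconsistent entries computed above.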
1902
1902
1903 @command('debugrebuildfncache', [], '')
1903 @command('debugrebuildfncache', [], '')
1904 def debugrebuildfncache(ui, repo):
1904 def debugrebuildfncache(ui, repo):
1905 """rebuild the fncache file"""
1905 """rebuild the fncache file"""
1906 repair.rebuildfncache(ui, repo)
1906 repair.rebuildfncache(ui, repo)
1907
1907
1908 @command('debugrename',
1908 @command('debugrename',
1909 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1909 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1910 _('[-r REV] FILE'))
1910 _('[-r REV] FILE'))
1911 def debugrename(ui, repo, file1, *pats, **opts):
1911 def debugrename(ui, repo, file1, *pats, **opts):
1912 """dump rename information"""
1912 """dump rename information"""
1913
1913
1914 opts = pycompat.byteskwargs(opts)
1914 opts = pycompat.byteskwargs(opts)
1915 ctx = scmutil.revsingle(repo, opts.get('rev'))
1915 ctx = scmutil.revsingle(repo, opts.get('rev'))
1916 m = scmutil.match(ctx, (file1,) + pats, opts)
1916 m = scmutil.match(ctx, (file1,) + pats, opts)
1917 for abs in ctx.walk(m):
1917 for abs in ctx.walk(m):
1918 fctx = ctx[abs]
1918 fctx = ctx[abs]
1919 o = fctx.filelog().renamed(fctx.filenode())
1919 o = fctx.filelog().renamed(fctx.filenode())
1920 rel = m.rel(abs)
1920 rel = m.rel(abs)
1921 if o:
1921 if o:
1922 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1922 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1923 else:
1923 else:
1924 ui.write(_("%s not renamed\n") % rel)
1924 ui.write(_("%s not renamed\n") % rel)
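# Illustrative output (file names are hypothetical): each matched file gets
# one of the two messages formatted above, for example::
#
#   $ hg debugrename -r . copied.txt plain.txt
#   copied.txt renamed from original.txt:HASH
#   plain.txt not renamed
#
# where HASH stands for the full hex filelog node of the rename source, per
# the ``hex(o[1])`` call above.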
1925
1925
1926 @command('debugrevlog', cmdutil.debugrevlogopts +
1926 @command('debugrevlog', cmdutil.debugrevlogopts +
1927 [('d', 'dump', False, _('dump index data'))],
1927 [('d', 'dump', False, _('dump index data'))],
1928 _('-c|-m|FILE'),
1928 _('-c|-m|FILE'),
1929 optionalrepo=True)
1929 optionalrepo=True)
1930 def debugrevlog(ui, repo, file_=None, **opts):
1930 def debugrevlog(ui, repo, file_=None, **opts):
1931 """show data and statistics about a revlog"""
1931 """show data and statistics about a revlog"""
1932 opts = pycompat.byteskwargs(opts)
1932 opts = pycompat.byteskwargs(opts)
1933 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1933 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1934
1934
1935 if opts.get("dump"):
1935 if opts.get("dump"):
1936 numrevs = len(r)
1936 numrevs = len(r)
1937 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1937 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1938 " rawsize totalsize compression heads chainlen\n"))
1938 " rawsize totalsize compression heads chainlen\n"))
1939 ts = 0
1939 ts = 0
1940 heads = set()
1940 heads = set()
1941
1941
1942 for rev in xrange(numrevs):
1942 for rev in xrange(numrevs):
1943 dbase = r.deltaparent(rev)
1943 dbase = r.deltaparent(rev)
1944 if dbase == -1:
1944 if dbase == -1:
1945 dbase = rev
1945 dbase = rev
1946 cbase = r.chainbase(rev)
1946 cbase = r.chainbase(rev)
1947 clen = r.chainlen(rev)
1947 clen = r.chainlen(rev)
1948 p1, p2 = r.parentrevs(rev)
1948 p1, p2 = r.parentrevs(rev)
1949 rs = r.rawsize(rev)
1949 rs = r.rawsize(rev)
1950 ts = ts + rs
1950 ts = ts + rs
1951 heads -= set(r.parentrevs(rev))
1951 heads -= set(r.parentrevs(rev))
1952 heads.add(rev)
1952 heads.add(rev)
1953 try:
1953 try:
1954 compression = ts / r.end(rev)
1954 compression = ts / r.end(rev)
1955 except ZeroDivisionError:
1955 except ZeroDivisionError:
1956 compression = 0
1956 compression = 0
1957 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1957 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1958 "%11d %5d %8d\n" %
1958 "%11d %5d %8d\n" %
1959 (rev, p1, p2, r.start(rev), r.end(rev),
1959 (rev, p1, p2, r.start(rev), r.end(rev),
1960 r.start(dbase), r.start(cbase),
1960 r.start(dbase), r.start(cbase),
1961 r.start(p1), r.start(p2),
1961 r.start(p1), r.start(p2),
1962 rs, ts, compression, len(heads), clen))
1962 rs, ts, compression, len(heads), clen))
1963 return 0
1963 return 0
1964
1964
1965 v = r.version
1965 v = r.version
1966 format = v & 0xFFFF
1966 format = v & 0xFFFF
1967 flags = []
1967 flags = []
1968 gdelta = False
1968 gdelta = False
1969 if v & revlog.FLAG_INLINE_DATA:
1969 if v & revlog.FLAG_INLINE_DATA:
1970 flags.append('inline')
1970 flags.append('inline')
1971 if v & revlog.FLAG_GENERALDELTA:
1971 if v & revlog.FLAG_GENERALDELTA:
1972 gdelta = True
1972 gdelta = True
1973 flags.append('generaldelta')
1973 flags.append('generaldelta')
1974 if not flags:
1974 if not flags:
1975 flags = ['(none)']
1975 flags = ['(none)']
1976
1976
1977 nummerges = 0
1977 nummerges = 0
1978 numfull = 0
1978 numfull = 0
1979 numprev = 0
1979 numprev = 0
1980 nump1 = 0
1980 nump1 = 0
1981 nump2 = 0
1981 nump2 = 0
1982 numother = 0
1982 numother = 0
1983 nump1prev = 0
1983 nump1prev = 0
1984 nump2prev = 0
1984 nump2prev = 0
1985 chainlengths = []
1985 chainlengths = []
1986 chainbases = []
1986 chainbases = []
1987 chainspans = []
1987 chainspans = []
1988
1988
1989 datasize = [None, 0, 0]
1989 datasize = [None, 0, 0]
1990 fullsize = [None, 0, 0]
1990 fullsize = [None, 0, 0]
1991 deltasize = [None, 0, 0]
1991 deltasize = [None, 0, 0]
1992 chunktypecounts = {}
1992 chunktypecounts = {}
1993 chunktypesizes = {}
1993 chunktypesizes = {}
1994
1994
1995 def addsize(size, l):
1995 def addsize(size, l):
1996 if l[0] is None or size < l[0]:
1996 if l[0] is None or size < l[0]:
1997 l[0] = size
1997 l[0] = size
1998 if size > l[1]:
1998 if size > l[1]:
1999 l[1] = size
1999 l[1] = size
2000 l[2] += size
2000 l[2] += size
2001
2001
2002 numrevs = len(r)
2002 numrevs = len(r)
2003 for rev in xrange(numrevs):
2003 for rev in xrange(numrevs):
2004 p1, p2 = r.parentrevs(rev)
2004 p1, p2 = r.parentrevs(rev)
2005 delta = r.deltaparent(rev)
2005 delta = r.deltaparent(rev)
2006 if format > 0:
2006 if format > 0:
2007 addsize(r.rawsize(rev), datasize)
2007 addsize(r.rawsize(rev), datasize)
2008 if p2 != nullrev:
2008 if p2 != nullrev:
2009 nummerges += 1
2009 nummerges += 1
2010 size = r.length(rev)
2010 size = r.length(rev)
2011 if delta == nullrev:
2011 if delta == nullrev:
2012 chainlengths.append(0)
2012 chainlengths.append(0)
2013 chainbases.append(r.start(rev))
2013 chainbases.append(r.start(rev))
2014 chainspans.append(size)
2014 chainspans.append(size)
2015 numfull += 1
2015 numfull += 1
2016 addsize(size, fullsize)
2016 addsize(size, fullsize)
2017 else:
2017 else:
2018 chainlengths.append(chainlengths[delta] + 1)
2018 chainlengths.append(chainlengths[delta] + 1)
2019 baseaddr = chainbases[delta]
2019 baseaddr = chainbases[delta]
2020 revaddr = r.start(rev)
2020 revaddr = r.start(rev)
2021 chainbases.append(baseaddr)
2021 chainbases.append(baseaddr)
2022 chainspans.append((revaddr - baseaddr) + size)
2022 chainspans.append((revaddr - baseaddr) + size)
2023 addsize(size, deltasize)
2023 addsize(size, deltasize)
2024 if delta == rev - 1:
2024 if delta == rev - 1:
2025 numprev += 1
2025 numprev += 1
2026 if delta == p1:
2026 if delta == p1:
2027 nump1prev += 1
2027 nump1prev += 1
2028 elif delta == p2:
2028 elif delta == p2:
2029 nump2prev += 1
2029 nump2prev += 1
2030 elif delta == p1:
2030 elif delta == p1:
2031 nump1 += 1
2031 nump1 += 1
2032 elif delta == p2:
2032 elif delta == p2:
2033 nump2 += 1
2033 nump2 += 1
2034 elif delta != nullrev:
2034 elif delta != nullrev:
2035 numother += 1
2035 numother += 1
2036
2036
2037 # Obtain data on the raw chunks in the revlog.
2037 # Obtain data on the raw chunks in the revlog.
2038 segment = r._getsegmentforrevs(rev, rev)[1]
2038 segment = r._getsegmentforrevs(rev, rev)[1]
2039 if segment:
2039 if segment:
2040 chunktype = bytes(segment[0:1])
2040 chunktype = bytes(segment[0:1])
2041 else:
2041 else:
2042 chunktype = 'empty'
2042 chunktype = 'empty'
2043
2043
2044 if chunktype not in chunktypecounts:
2044 if chunktype not in chunktypecounts:
2045 chunktypecounts[chunktype] = 0
2045 chunktypecounts[chunktype] = 0
2046 chunktypesizes[chunktype] = 0
2046 chunktypesizes[chunktype] = 0
2047
2047
2048 chunktypecounts[chunktype] += 1
2048 chunktypecounts[chunktype] += 1
2049 chunktypesizes[chunktype] += size
2049 chunktypesizes[chunktype] += size
2050
2050
2051 # Adjust size min value for empty cases
2051 # Adjust size min value for empty cases
2052 for size in (datasize, fullsize, deltasize):
2052 for size in (datasize, fullsize, deltasize):
2053 if size[0] is None:
2053 if size[0] is None:
2054 size[0] = 0
2054 size[0] = 0
2055
2055
2056 numdeltas = numrevs - numfull
2056 numdeltas = numrevs - numfull
2057 numoprev = numprev - nump1prev - nump2prev
2057 numoprev = numprev - nump1prev - nump2prev
2058 totalrawsize = datasize[2]
2058 totalrawsize = datasize[2]
2059 datasize[2] /= numrevs
2059 datasize[2] /= numrevs
2060 fulltotal = fullsize[2]
2060 fulltotal = fullsize[2]
2061 fullsize[2] /= numfull
2061 fullsize[2] /= numfull
2062 deltatotal = deltasize[2]
2062 deltatotal = deltasize[2]
2063 if numrevs - numfull > 0:
2063 if numrevs - numfull > 0:
2064 deltasize[2] /= numrevs - numfull
2064 deltasize[2] /= numrevs - numfull
2065 totalsize = fulltotal + deltatotal
2065 totalsize = fulltotal + deltatotal
2066 avgchainlen = sum(chainlengths) / numrevs
2066 avgchainlen = sum(chainlengths) / numrevs
2067 maxchainlen = max(chainlengths)
2067 maxchainlen = max(chainlengths)
2068 maxchainspan = max(chainspans)
2068 maxchainspan = max(chainspans)
2069 compratio = 1
2069 compratio = 1
2070 if totalsize:
2070 if totalsize:
2071 compratio = totalrawsize / totalsize
2071 compratio = totalrawsize / totalsize
2072
2072
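    # Worked example (illustrative numbers only): if the revlog holds 1000
    # bytes of raw revision data (totalrawsize) stored as 250 bytes of full
    # snapshots plus deltas (totalsize), the compression ratio reported
    # below is 1000 / 250 = 4. The ratio stays at the default of 1 when
    # totalsize is zero, e.g. for an empty revlog.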
2073 basedfmtstr = '%%%dd\n'
2073 basedfmtstr = '%%%dd\n'
2074 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2074 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2075
2075
2076 def dfmtstr(max):
2076 def dfmtstr(max):
2077 return basedfmtstr % len(str(max))
2077 return basedfmtstr % len(str(max))
2078 def pcfmtstr(max, padding=0):
2078 def pcfmtstr(max, padding=0):
2079 return basepcfmtstr % (len(str(max)), ' ' * padding)
2079 return basepcfmtstr % (len(str(max)), ' ' * padding)
2080
2080
2081 def pcfmt(value, total):
2081 def pcfmt(value, total):
2082 if total:
2082 if total:
2083 return (value, 100 * float(value) / total)
2083 return (value, 100 * float(value) / total)
2084 else:
2084 else:
2085 return value, 100.0
2085 return value, 100.0
2086
2086
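    # For instance (illustrative values), pcfmt(25, 200) yields (25, 12.5),
    # which the pcfmtstr templates render as a padded count followed by
    # "(12.50%)"; with a zero total the percentage defaults to 100.0, as
    # returned above.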
2087 ui.write(('format : %d\n') % format)
2087 ui.write(('format : %d\n') % format)
2088 ui.write(('flags : %s\n') % ', '.join(flags))
2088 ui.write(('flags : %s\n') % ', '.join(flags))
2089
2089
2090 ui.write('\n')
2090 ui.write('\n')
2091 fmt = pcfmtstr(totalsize)
2091 fmt = pcfmtstr(totalsize)
2092 fmt2 = dfmtstr(totalsize)
2092 fmt2 = dfmtstr(totalsize)
2093 ui.write(('revisions : ') + fmt2 % numrevs)
2093 ui.write(('revisions : ') + fmt2 % numrevs)
2094 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2094 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2095 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2095 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2096 ui.write(('revisions : ') + fmt2 % numrevs)
2096 ui.write(('revisions : ') + fmt2 % numrevs)
2097 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2097 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2098 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2098 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2099 ui.write(('revision size : ') + fmt2 % totalsize)
2099 ui.write(('revision size : ') + fmt2 % totalsize)
2100 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2100 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2101 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2101 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2102
2102
2103 def fmtchunktype(chunktype):
2103 def fmtchunktype(chunktype):
2104 if chunktype == 'empty':
2104 if chunktype == 'empty':
2105 return ' %s : ' % chunktype
2105 return ' %s : ' % chunktype
2106 elif chunktype in pycompat.bytestr(string.ascii_letters):
2106 elif chunktype in pycompat.bytestr(string.ascii_letters):
2107 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2107 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2108 else:
2108 else:
2109 return ' 0x%s : ' % hex(chunktype)
2109 return ' 0x%s : ' % hex(chunktype)
2110
2110
2111 ui.write('\n')
2111 ui.write('\n')
2112 ui.write(('chunks : ') + fmt2 % numrevs)
2112 ui.write(('chunks : ') + fmt2 % numrevs)
2113 for chunktype in sorted(chunktypecounts):
2113 for chunktype in sorted(chunktypecounts):
2114 ui.write(fmtchunktype(chunktype))
2114 ui.write(fmtchunktype(chunktype))
2115 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2115 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2116 ui.write(('chunks size : ') + fmt2 % totalsize)
2116 ui.write(('chunks size : ') + fmt2 % totalsize)
2117 for chunktype in sorted(chunktypecounts):
2117 for chunktype in sorted(chunktypecounts):
2118 ui.write(fmtchunktype(chunktype))
2118 ui.write(fmtchunktype(chunktype))
2119 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2119 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2120
2120
2121 ui.write('\n')
2121 ui.write('\n')
2122 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2122 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2123 ui.write(('avg chain length : ') + fmt % avgchainlen)
2123 ui.write(('avg chain length : ') + fmt % avgchainlen)
2124 ui.write(('max chain length : ') + fmt % maxchainlen)
2124 ui.write(('max chain length : ') + fmt % maxchainlen)
2125 ui.write(('max chain reach : ') + fmt % maxchainspan)
2125 ui.write(('max chain reach : ') + fmt % maxchainspan)
2126 ui.write(('compression ratio : ') + fmt % compratio)
2126 ui.write(('compression ratio : ') + fmt % compratio)
2127
2127
2128 if format > 0:
2128 if format > 0:
2129 ui.write('\n')
2129 ui.write('\n')
2130 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2130 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2131 % tuple(datasize))
2131 % tuple(datasize))
2132 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2132 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2133 % tuple(fullsize))
2133 % tuple(fullsize))
2134 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2134 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2135 % tuple(deltasize))
2135 % tuple(deltasize))
2136
2136
2137 if numdeltas > 0:
2137 if numdeltas > 0:
2138 ui.write('\n')
2138 ui.write('\n')
2139 fmt = pcfmtstr(numdeltas)
2139 fmt = pcfmtstr(numdeltas)
2140 fmt2 = pcfmtstr(numdeltas, 4)
2140 fmt2 = pcfmtstr(numdeltas, 4)
2141 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2141 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2142 if numprev > 0:
2142 if numprev > 0:
2143 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2143 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2144 numprev))
2144 numprev))
2145 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2145 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2146 numprev))
2146 numprev))
2147 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2147 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2148 numprev))
2148 numprev))
2149 if gdelta:
2149 if gdelta:
2150 ui.write(('deltas against p1 : ')
2150 ui.write(('deltas against p1 : ')
2151 + fmt % pcfmt(nump1, numdeltas))
2151 + fmt % pcfmt(nump1, numdeltas))
2152 ui.write(('deltas against p2 : ')
2152 ui.write(('deltas against p2 : ')
2153 + fmt % pcfmt(nump2, numdeltas))
2153 + fmt % pcfmt(nump2, numdeltas))
2154 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2154 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2155 numdeltas))
2155 numdeltas))
2156
2156
2157 @command('debugrevspec',
2157 @command('debugrevspec',
2158 [('', 'optimize', None,
2158 [('', 'optimize', None,
2159 _('print parsed tree after optimizing (DEPRECATED)')),
2159 _('print parsed tree after optimizing (DEPRECATED)')),
2160 ('', 'show-revs', True, _('print list of result revisions (default)')),
2160 ('', 'show-revs', True, _('print list of result revisions (default)')),
2161 ('s', 'show-set', None, _('print internal representation of result set')),
2161 ('s', 'show-set', None, _('print internal representation of result set')),
2162 ('p', 'show-stage', [],
2162 ('p', 'show-stage', [],
2163 _('print parsed tree at the given stage'), _('NAME')),
2163 _('print parsed tree at the given stage'), _('NAME')),
2164 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2164 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2165 ('', 'verify-optimized', False, _('verify optimized result')),
2165 ('', 'verify-optimized', False, _('verify optimized result')),
2166 ],
2166 ],
2167 ('REVSPEC'))
2167 ('REVSPEC'))
2168 def debugrevspec(ui, repo, expr, **opts):
2168 def debugrevspec(ui, repo, expr, **opts):
2169 """parse and apply a revision specification
2169 """parse and apply a revision specification
2170
2170
2171 Use -p/--show-stage option to print the parsed tree at the given stages.
2171 Use -p/--show-stage option to print the parsed tree at the given stages.
2172 Use -p all to print tree at every stage.
2172 Use -p all to print tree at every stage.
2173
2173
2174 Use --no-show-revs option with -s or -p to print only the set
2174 Use --no-show-revs option with -s or -p to print only the set
2175 representation or the parsed tree respectively.
2175 representation or the parsed tree respectively.
2176
2176
2177 Use --verify-optimized to compare the optimized result with the unoptimized
2177 Use --verify-optimized to compare the optimized result with the unoptimized
2178 one. Returns 1 if the optimized result differs.
2178 one. Returns 1 if the optimized result differs.
2179 """
2179 """
2180 opts = pycompat.byteskwargs(opts)
2180 opts = pycompat.byteskwargs(opts)
2181 aliases = ui.configitems('revsetalias')
2181 aliases = ui.configitems('revsetalias')
2182 stages = [
2182 stages = [
2183 ('parsed', lambda tree: tree),
2183 ('parsed', lambda tree: tree),
2184 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2184 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2185 ui.warn)),
2185 ui.warn)),
2186 ('concatenated', revsetlang.foldconcat),
2186 ('concatenated', revsetlang.foldconcat),
2187 ('analyzed', revsetlang.analyze),
2187 ('analyzed', revsetlang.analyze),
2188 ('optimized', revsetlang.optimize),
2188 ('optimized', revsetlang.optimize),
2189 ]
2189 ]
2190 if opts['no_optimized']:
2190 if opts['no_optimized']:
2191 stages = stages[:-1]
2191 stages = stages[:-1]
2192 if opts['verify_optimized'] and opts['no_optimized']:
2192 if opts['verify_optimized'] and opts['no_optimized']:
2193 raise error.Abort(_('cannot use --verify-optimized with '
2193 raise error.Abort(_('cannot use --verify-optimized with '
2194 '--no-optimized'))
2194 '--no-optimized'))
2195 stagenames = set(n for n, f in stages)
2195 stagenames = set(n for n, f in stages)
2196
2196
2197 showalways = set()
2197 showalways = set()
2198 showchanged = set()
2198 showchanged = set()
2199 if ui.verbose and not opts['show_stage']:
2199 if ui.verbose and not opts['show_stage']:
2200 # show parsed tree by --verbose (deprecated)
2200 # show parsed tree by --verbose (deprecated)
2201 showalways.add('parsed')
2201 showalways.add('parsed')
2202 showchanged.update(['expanded', 'concatenated'])
2202 showchanged.update(['expanded', 'concatenated'])
2203 if opts['optimize']:
2203 if opts['optimize']:
2204 showalways.add('optimized')
2204 showalways.add('optimized')
2205 if opts['show_stage'] and opts['optimize']:
2205 if opts['show_stage'] and opts['optimize']:
2206 raise error.Abort(_('cannot use --optimize with --show-stage'))
2206 raise error.Abort(_('cannot use --optimize with --show-stage'))
2207 if opts['show_stage'] == ['all']:
2207 if opts['show_stage'] == ['all']:
2208 showalways.update(stagenames)
2208 showalways.update(stagenames)
2209 else:
2209 else:
2210 for n in opts['show_stage']:
2210 for n in opts['show_stage']:
2211 if n not in stagenames:
2211 if n not in stagenames:
2212 raise error.Abort(_('invalid stage name: %s') % n)
2212 raise error.Abort(_('invalid stage name: %s') % n)
2213 showalways.update(opts['show_stage'])
2213 showalways.update(opts['show_stage'])
2214
2214
2215 treebystage = {}
2215 treebystage = {}
2216 printedtree = None
2216 printedtree = None
2217 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2217 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2218 for n, f in stages:
2218 for n, f in stages:
2219 treebystage[n] = tree = f(tree)
2219 treebystage[n] = tree = f(tree)
2220 if n in showalways or (n in showchanged and tree != printedtree):
2220 if n in showalways or (n in showchanged and tree != printedtree):
2221 if opts['show_stage'] or n != 'parsed':
2221 if opts['show_stage'] or n != 'parsed':
2222 ui.write(("* %s:\n") % n)
2222 ui.write(("* %s:\n") % n)
2223 ui.write(revsetlang.prettyformat(tree), "\n")
2223 ui.write(revsetlang.prettyformat(tree), "\n")
2224 printedtree = tree
2224 printedtree = tree
2225
2225
2226 if opts['verify_optimized']:
2226 if opts['verify_optimized']:
2227 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2227 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2228 brevs = revset.makematcher(treebystage['optimized'])(repo)
2228 brevs = revset.makematcher(treebystage['optimized'])(repo)
2229 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2229 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2230 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2230 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2231 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2231 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2232 arevs = list(arevs)
2232 arevs = list(arevs)
2233 brevs = list(brevs)
2233 brevs = list(brevs)
2234 if arevs == brevs:
2234 if arevs == brevs:
2235 return 0
2235 return 0
2236 ui.write(('--- analyzed\n'), label='diff.file_a')
2236 ui.write(('--- analyzed\n'), label='diff.file_a')
2237 ui.write(('+++ optimized\n'), label='diff.file_b')
2237 ui.write(('+++ optimized\n'), label='diff.file_b')
2238 sm = difflib.SequenceMatcher(None, arevs, brevs)
2238 sm = difflib.SequenceMatcher(None, arevs, brevs)
2239 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2239 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2240 if tag in ('delete', 'replace'):
2240 if tag in ('delete', 'replace'):
2241 for c in arevs[alo:ahi]:
2241 for c in arevs[alo:ahi]:
2242 ui.write('-%s\n' % c, label='diff.deleted')
2242 ui.write('-%s\n' % c, label='diff.deleted')
2243 if tag in ('insert', 'replace'):
2243 if tag in ('insert', 'replace'):
2244 for c in brevs[blo:bhi]:
2244 for c in brevs[blo:bhi]:
2245 ui.write('+%s\n' % c, label='diff.inserted')
2245 ui.write('+%s\n' % c, label='diff.inserted')
2246 if tag == 'equal':
2246 if tag == 'equal':
2247 for c in arevs[alo:ahi]:
2247 for c in arevs[alo:ahi]:
2248 ui.write(' %s\n' % c)
2248 ui.write(' %s\n' % c)
2249 return 1
2249 return 1
2250
2250
2251 func = revset.makematcher(tree)
2251 func = revset.makematcher(tree)
2252 revs = func(repo)
2252 revs = func(repo)
2253 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2253 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2254 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2254 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2255 if not opts['show_revs']:
2255 if not opts['show_revs']:
2256 return
2256 return
2257 for c in revs:
2257 for c in revs:
2258 ui.write("%d\n" % c)
2258 ui.write("%d\n" % c)
2259
2259
2260 @command('debugserve', [
2260 @command('debugserve', [
2261 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2261 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2262 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2262 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2263 ('', 'logiofile', '', _('file to log server I/O to')),
2263 ('', 'logiofile', '', _('file to log server I/O to')),
2264 ], '')
2264 ], '')
2265 def debugserve(ui, repo, **opts):
2265 def debugserve(ui, repo, **opts):
2266 """run a server with advanced settings
2266 """run a server with advanced settings
2267
2267
2268 This command is similar to :hg:`serve`. It exists partially as a
2268 This command is similar to :hg:`serve`. It exists partially as a
2269 workaround to the fact that ``hg serve --stdio`` must have specific
2269 workaround to the fact that ``hg serve --stdio`` must have specific
2270 arguments for security reasons.
2270 arguments for security reasons.
2271 """
2271 """
2272 opts = pycompat.byteskwargs(opts)
2272 opts = pycompat.byteskwargs(opts)
2273
2273
2274 if not opts['sshstdio']:
2274 if not opts['sshstdio']:
2275 raise error.Abort(_('only --sshstdio is currently supported'))
2275 raise error.Abort(_('only --sshstdio is currently supported'))
2276
2276
2277 logfh = None
2277 logfh = None
2278
2278
2279 if opts['logiofd'] and opts['logiofile']:
2279 if opts['logiofd'] and opts['logiofile']:
2280 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2280 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2281
2281
2282 if opts['logiofd']:
2282 if opts['logiofd']:
2283 # Line buffered because output is line based.
2283 # Line buffered because output is line based.
2284 try:
2284 try:
2285 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2285 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2286 except OSError as e:
2286 except OSError as e:
2287 if e.errno != errno.ESPIPE:
2287 if e.errno != errno.ESPIPE:
2288 raise
2288 raise
2289 # can't seek a pipe, so `ab` mode fails on py3
2289 # can't seek a pipe, so `ab` mode fails on py3
2290 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2290 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2291 elif opts['logiofile']:
2291 elif opts['logiofile']:
2292 logfh = open(opts['logiofile'], 'ab', 1)
2292 logfh = open(opts['logiofile'], 'ab', 1)
2293
2293
2294 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2294 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2295 s.serve_forever()
2295 s.serve_forever()
2296
2296
2297 @command('debugsetparents', [], _('REV1 [REV2]'))
2297 @command('debugsetparents', [], _('REV1 [REV2]'))
2298 def debugsetparents(ui, repo, rev1, rev2=None):
2298 def debugsetparents(ui, repo, rev1, rev2=None):
2299 """manually set the parents of the current working directory
2299 """manually set the parents of the current working directory
2300
2300
2301 This is useful for writing repository conversion tools, but should
2301 This is useful for writing repository conversion tools, but should
2302 be used with care. For example, neither the working directory nor the
2302 be used with care. For example, neither the working directory nor the
2303 dirstate is updated, so file status may be incorrect after running this
2303 dirstate is updated, so file status may be incorrect after running this
2304 command.
2304 command.
2305
2305
2306 Returns 0 on success.
2306 Returns 0 on success.
2307 """
2307 """
2308
2308
2309 node1 = scmutil.revsingle(repo, rev1).node()
2309 node1 = scmutil.revsingle(repo, rev1).node()
2310 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2310 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2311
2311
2312 with repo.wlock():
2312 with repo.wlock():
2313 repo.setparents(node1, node2)
2313 repo.setparents(node1, node2)
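# Illustrative usage (revision arguments are placeholders): running
# ``hg debugsetparents REV1`` sets a single parent, while
# ``hg debugsetparents REV1 REV2`` records two parents; a missing second
# argument falls back to 'null', per the revsingle() default above. As the
# docstring warns, the dirstate itself is not touched.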
2314
2314
2315 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2315 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2316 def debugssl(ui, repo, source=None, **opts):
2316 def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
2329 if not pycompat.iswindows:
2329 if not pycompat.iswindows:
2330 raise error.Abort(_('certificate chain building is only possible on '
2330 raise error.Abort(_('certificate chain building is only possible on '
2331 'Windows'))
2331 'Windows'))
2332
2332
2333 if not source:
2333 if not source:
2334 if not repo:
2334 if not repo:
2335 raise error.Abort(_("there is no Mercurial repository here, and no "
2335 raise error.Abort(_("there is no Mercurial repository here, and no "
2336 "server specified"))
2336 "server specified"))
2337 source = "default"
2337 source = "default"
2338
2338
2339 source, branches = hg.parseurl(ui.expandpath(source))
2339 source, branches = hg.parseurl(ui.expandpath(source))
2340 url = util.url(source)
2340 url = util.url(source)
2341 addr = None
2341 addr = None
2342
2342
2343 defaultport = {'https': 443, 'ssh': 22}
2343 defaultport = {'https': 443, 'ssh': 22}
2344 if url.scheme in defaultport:
2344 if url.scheme in defaultport:
2345 try:
2345 try:
2346 addr = (url.host, int(url.port or defaultport[url.scheme]))
2346 addr = (url.host, int(url.port or defaultport[url.scheme]))
2347 except ValueError:
2347 except ValueError:
2348 raise error.Abort(_("malformed port number in URL"))
2348 raise error.Abort(_("malformed port number in URL"))
2349 else:
2349 else:
2350 raise error.Abort(_("only https and ssh connections are supported"))
2350 raise error.Abort(_("only https and ssh connections are supported"))
2351
2351
2352 from . import win32
2352 from . import win32
2353
2353
2354 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2354 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2355 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2355 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2356
2356
2357 try:
2357 try:
2358 s.connect(addr)
2358 s.connect(addr)
2359 cert = s.getpeercert(True)
2359 cert = s.getpeercert(True)
2360
2360
2361 ui.status(_('checking the certificate chain for %s\n') % url.host)
2361 ui.status(_('checking the certificate chain for %s\n') % url.host)
2362
2362
2363 complete = win32.checkcertificatechain(cert, build=False)
2363 complete = win32.checkcertificatechain(cert, build=False)
2364
2364
2365 if not complete:
2365 if not complete:
2366 ui.status(_('certificate chain is incomplete, updating... '))
2366 ui.status(_('certificate chain is incomplete, updating... '))
2367
2367
2368 if not win32.checkcertificatechain(cert):
2368 if not win32.checkcertificatechain(cert):
2369 ui.status(_('failed.\n'))
2369 ui.status(_('failed.\n'))
2370 else:
2370 else:
2371 ui.status(_('done.\n'))
2371 ui.status(_('done.\n'))
2372 else:
2372 else:
2373 ui.status(_('full certificate chain is available\n'))
2373 ui.status(_('full certificate chain is available\n'))
2374 finally:
2374 finally:
2375 s.close()
2375 s.close()
2376
2376
2377 @command('debugsub',
2377 @command('debugsub',
2378 [('r', 'rev', '',
2378 [('r', 'rev', '',
2379 _('revision to check'), _('REV'))],
2379 _('revision to check'), _('REV'))],
2380 _('[-r REV] [REV]'))
2380 _('[-r REV] [REV]'))
2381 def debugsub(ui, repo, rev=None):
2381 def debugsub(ui, repo, rev=None):
2382 ctx = scmutil.revsingle(repo, rev, None)
2382 ctx = scmutil.revsingle(repo, rev, None)
2383 for k, v in sorted(ctx.substate.items()):
2383 for k, v in sorted(ctx.substate.items()):
2384 ui.write(('path %s\n') % k)
2384 ui.write(('path %s\n') % k)
2385 ui.write((' source %s\n') % v[0])
2385 ui.write((' source %s\n') % v[0])
2386 ui.write((' revision %s\n') % v[1])
2386 ui.write((' revision %s\n') % v[1])
2387
2387
2388 @command('debugsuccessorssets',
2388 @command('debugsuccessorssets',
2389 [('', 'closest', False, _('return closest successors sets only'))],
2389 [('', 'closest', False, _('return closest successors sets only'))],
2390 _('[REV]'))
2390 _('[REV]'))
2391 def debugsuccessorssets(ui, repo, *revs, **opts):
2391 def debugsuccessorssets(ui, repo, *revs, **opts):
2392 """show set of successors for revision
2392 """show set of successors for revision
2393
2393
2394 A successors set of changeset A is a consistent group of revisions that
2394 A successors set of changeset A is a consistent group of revisions that
2395 succeed A. It contains non-obsolete changesets only unless closests
2395 succeed A. It contains non-obsolete changesets only unless closests
2396 successors set is set.
2396 successors set is set.
2397
2397
2398 In most cases a changeset A has a single successors set containing a single
2398 In most cases a changeset A has a single successors set containing a single
2399 successor (changeset A replaced by A').
2399 successor (changeset A replaced by A').
2400
2400
2401 A changeset that is made obsolete with no successors are called "pruned".
2401 A changeset that is made obsolete with no successors are called "pruned".
2402 Such changesets have no successors sets at all.
2402 Such changesets have no successors sets at all.
2403
2403
2404 A changeset that has been "split" will have a successors set containing
2404 A changeset that has been "split" will have a successors set containing
2405 more than one successor.
2405 more than one successor.
2406
2406
2407 A changeset that has been rewritten in multiple different ways is called
2407 A changeset that has been rewritten in multiple different ways is called
2408 "divergent". Such changesets have multiple successor sets (each of which
2408 "divergent". Such changesets have multiple successor sets (each of which
2409 may also be split, i.e. have multiple successors).
2409 may also be split, i.e. have multiple successors).
2410
2410
2411 Results are displayed as follows::
2411 Results are displayed as follows::
2412
2412
2413 <rev1>
2413 <rev1>
2414 <successors-1A>
2414 <successors-1A>
2415 <rev2>
2415 <rev2>
2416 <successors-2A>
2416 <successors-2A>
2417 <successors-2B1> <successors-2B2> <successors-2B3>
2417 <successors-2B1> <successors-2B2> <successors-2B3>
2418
2418
2419 Here rev2 has two possible (i.e. divergent) successors sets. The first
2419 Here rev2 has two possible (i.e. divergent) successors sets. The first
2420 holds one element, whereas the second holds three (i.e. the changeset has
2420 holds one element, whereas the second holds three (i.e. the changeset has
2421 been split).
2421 been split).
2422 """
2422 """
2423 # passed to successorssets caching computation from one call to another
2423 # passed to successorssets caching computation from one call to another
2424 cache = {}
2424 cache = {}
2425 ctx2str = bytes
2425 ctx2str = bytes
2426 node2str = short
2426 node2str = short
2427 for rev in scmutil.revrange(repo, revs):
2427 for rev in scmutil.revrange(repo, revs):
2428 ctx = repo[rev]
2428 ctx = repo[rev]
2429 ui.write('%s\n'% ctx2str(ctx))
2429 ui.write('%s\n'% ctx2str(ctx))
2430 for succsset in obsutil.successorssets(repo, ctx.node(),
2430 for succsset in obsutil.successorssets(repo, ctx.node(),
2431 closest=opts[r'closest'],
2431 closest=opts[r'closest'],
2432 cache=cache):
2432 cache=cache):
2433 if succsset:
2433 if succsset:
2434 ui.write(' ')
2434 ui.write(' ')
2435 ui.write(node2str(succsset[0]))
2435 ui.write(node2str(succsset[0]))
2436 for node in succsset[1:]:
2436 for node in succsset[1:]:
2437 ui.write(' ')
2437 ui.write(' ')
2438 ui.write(node2str(node))
2438 ui.write(node2str(node))
2439 ui.write('\n')
2439 ui.write('\n')
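# Illustrative output (all values are placeholders): for a revision with two
# divergent successors sets, one of which is a split, the display described
# in the docstring could look roughly like::
#
#   <rev>
#    1a2b3c4d5e6f
#    6f5e4d3c2b1a 0123456789ab
#
# i.e. the revision on its own line, then one indented line per successors
# set listing the short nodes of its members.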
2440
2440
2441 @command('debugtemplate',
2441 @command('debugtemplate',
2442 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2442 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2443 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2443 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2444 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2444 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2445 optionalrepo=True)
2445 optionalrepo=True)
2446 def debugtemplate(ui, repo, tmpl, **opts):
2446 def debugtemplate(ui, repo, tmpl, **opts):
2447 """parse and apply a template
2447 """parse and apply a template
2448
2448
2449 If -r/--rev is given, the template is processed as a log template and
2449 If -r/--rev is given, the template is processed as a log template and
2450 applied to the given changesets. Otherwise, it is processed as a generic
2450 applied to the given changesets. Otherwise, it is processed as a generic
2451 template.
2451 template.
2452
2452
2453 Use --verbose to print the parsed tree.
2453 Use --verbose to print the parsed tree.
2454 """
2454 """
2455 revs = None
2455 revs = None
2456 if opts[r'rev']:
2456 if opts[r'rev']:
2457 if repo is None:
2457 if repo is None:
2458 raise error.RepoError(_('there is no Mercurial repository here '
2458 raise error.RepoError(_('there is no Mercurial repository here '
2459 '(.hg not found)'))
2459 '(.hg not found)'))
2460 revs = scmutil.revrange(repo, opts[r'rev'])
2460 revs = scmutil.revrange(repo, opts[r'rev'])
2461
2461
2462 props = {}
2462 props = {}
2463 for d in opts[r'define']:
2463 for d in opts[r'define']:
2464 try:
2464 try:
2465 k, v = (e.strip() for e in d.split('=', 1))
2465 k, v = (e.strip() for e in d.split('=', 1))
2466 if not k or k == 'ui':
2466 if not k or k == 'ui':
2467 raise ValueError
2467 raise ValueError
2468 props[k] = v
2468 props[k] = v
2469 except ValueError:
2469 except ValueError:
2470 raise error.Abort(_('malformed keyword definition: %s') % d)
2470 raise error.Abort(_('malformed keyword definition: %s') % d)
2471
2471
2472 if ui.verbose:
2472 if ui.verbose:
2473 aliases = ui.configitems('templatealias')
2473 aliases = ui.configitems('templatealias')
2474 tree = templater.parse(tmpl)
2474 tree = templater.parse(tmpl)
2475 ui.note(templater.prettyformat(tree), '\n')
2475 ui.note(templater.prettyformat(tree), '\n')
2476 newtree = templater.expandaliases(tree, aliases)
2476 newtree = templater.expandaliases(tree, aliases)
2477 if newtree != tree:
2477 if newtree != tree:
2478 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2478 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2479
2479
2480 if revs is None:
2480 if revs is None:
2481 tres = formatter.templateresources(ui, repo)
2481 tres = formatter.templateresources(ui, repo)
2482 t = formatter.maketemplater(ui, tmpl, resources=tres)
2482 t = formatter.maketemplater(ui, tmpl, resources=tres)
2483 if ui.verbose:
2483 if ui.verbose:
2484 kwds, funcs = t.symbolsuseddefault()
2484 kwds, funcs = t.symbolsuseddefault()
2485 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2485 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2486 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2486 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2487 ui.write(t.renderdefault(props))
2487 ui.write(t.renderdefault(props))
2488 else:
2488 else:
2489 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2489 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2490 if ui.verbose:
2490 if ui.verbose:
2491 kwds, funcs = displayer.t.symbolsuseddefault()
2491 kwds, funcs = displayer.t.symbolsuseddefault()
2492 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2492 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2493 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2493 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2494 for r in revs:
2494 for r in revs:
2495 displayer.show(repo[r], **pycompat.strkwargs(props))
2495 displayer.show(repo[r], **pycompat.strkwargs(props))
2496 displayer.close()
2496 displayer.close()
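# Illustrative invocations (template text and keyword chosen arbitrarily)::
#
#   hg debugtemplate -r . '{rev}:{node|short}\n'
#   hg debugtemplate -D word=world '{word}\n'
#
# The first renders a log template against the given changeset; the second
# exercises the generic path with a keyword defined via -D, matching the
# KEY=VALUE parsing above.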
2497
2497
2498 @command('debuguigetpass', [
2498 @command('debuguigetpass', [
2499 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2499 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2500 ], _('[-p TEXT]'), norepo=True)
2500 ], _('[-p TEXT]'), norepo=True)
2501 def debuguigetpass(ui, prompt=''):
2501 def debuguigetpass(ui, prompt=''):
2502 """show prompt to type password"""
2502 """show prompt to type password"""
2503 r = ui.getpass(prompt)
2503 r = ui.getpass(prompt)
    ui.write(('response: %s\n') % r)
2505
2505
2506 @command('debuguiprompt', [
2506 @command('debuguiprompt', [
2507 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2507 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2508 ], _('[-p TEXT]'), norepo=True)
2508 ], _('[-p TEXT]'), norepo=True)
2509 def debuguiprompt(ui, prompt=''):
2509 def debuguiprompt(ui, prompt=''):
2510 """show plain prompt"""
2510 """show plain prompt"""
2511 r = ui.prompt(prompt)
2511 r = ui.prompt(prompt)
2512 ui.write(('response: %s\n') % r)
2512 ui.write(('response: %s\n') % r)
2513
2513
2514 @command('debugupdatecaches', [])
2514 @command('debugupdatecaches', [])
2515 def debugupdatecaches(ui, repo, *pats, **opts):
2515 def debugupdatecaches(ui, repo, *pats, **opts):
2516 """warm all known caches in the repository"""
2516 """warm all known caches in the repository"""
2517 with repo.wlock(), repo.lock():
2517 with repo.wlock(), repo.lock():
2518 repo.updatecaches(full=True)
2518 repo.updatecaches(full=True)
2519
2519
2520 @command('debugupgraderepo', [
2520 @command('debugupgraderepo', [
2521 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2521 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2522 ('', 'run', False, _('performs an upgrade')),
2522 ('', 'run', False, _('performs an upgrade')),
2523 ])
2523 ])
2524 def debugupgraderepo(ui, repo, run=False, optimize=None):
2524 def debugupgraderepo(ui, repo, run=False, optimize=None):
2525 """upgrade a repository to use different features
2525 """upgrade a repository to use different features
2526
2526
2527 If no arguments are specified, the repository is evaluated for upgrade
2527 If no arguments are specified, the repository is evaluated for upgrade
2528 and a list of problems and potential optimizations is printed.
2528 and a list of problems and potential optimizations is printed.
2529
2529
2530 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2530 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2531 can be influenced via additional arguments. More details will be provided
2531 can be influenced via additional arguments. More details will be provided
2532 by the command output when run without ``--run``.
2532 by the command output when run without ``--run``.
2533
2533
2534 During the upgrade, the repository will be locked and no writes will be
2534 During the upgrade, the repository will be locked and no writes will be
2535 allowed.
2535 allowed.
2536
2536
2537 At the end of the upgrade, the repository may not be readable while new
2537 At the end of the upgrade, the repository may not be readable while new
2538 repository data is swapped in. This window will be as long as it takes to
2538 repository data is swapped in. This window will be as long as it takes to
2539 rename some directories inside the ``.hg`` directory. On most machines, this
2539 rename some directories inside the ``.hg`` directory. On most machines, this
2540 should complete almost instantaneously and the chances of a consumer being
2540 should complete almost instantaneously and the chances of a consumer being
2541 unable to access the repository should be low.
2541 unable to access the repository should be low.
2542 """
2542 """
2543 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2543 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
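# Illustrative usage: ``hg debugupgraderepo`` alone only reports findings;
# adding ``--run`` performs the upgrade, optionally with one or more
# ``--optimize NAME`` flags whose valid names are listed in that report.
# (No specific optimization names are assumed here.)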
2544
2544
2545 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2545 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2546 inferrepo=True)
2546 inferrepo=True)
2547 def debugwalk(ui, repo, *pats, **opts):
2547 def debugwalk(ui, repo, *pats, **opts):
2548 """show how files match on given patterns"""
2548 """show how files match on given patterns"""
2549 opts = pycompat.byteskwargs(opts)
2549 opts = pycompat.byteskwargs(opts)
2550 m = scmutil.match(repo[None], pats, opts)
2550 m = scmutil.match(repo[None], pats, opts)
2551 if ui.verbose:
2551 if ui.verbose:
2552 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2552 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2553 items = list(repo[None].walk(m))
2553 items = list(repo[None].walk(m))
2554 if not items:
2554 if not items:
2555 return
2555 return
2556 f = lambda fn: fn
2556 f = lambda fn: fn
2557 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2557 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2558 f = lambda fn: util.normpath(fn)
2558 f = lambda fn: util.normpath(fn)
2559 fmt = 'f %%-%ds %%-%ds %%s' % (
2559 fmt = 'f %%-%ds %%-%ds %%s' % (
2560 max([len(abs) for abs in items]),
2560 max([len(abs) for abs in items]),
2561 max([len(m.rel(abs)) for abs in items]))
2561 max([len(m.rel(abs)) for abs in items]))
2562 for abs in items:
2562 for abs in items:
2563 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2563 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2564 ui.write("%s\n" % line.rstrip())
2564 ui.write("%s\n" % line.rstrip())
2565
2565
2566 @command('debugwhyunstable', [], _('REV'))
2566 @command('debugwhyunstable', [], _('REV'))
2567 def debugwhyunstable(ui, repo, rev):
2567 def debugwhyunstable(ui, repo, rev):
2568 """explain instabilities of a changeset"""
2568 """explain instabilities of a changeset"""
2569 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2569 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2570 dnodes = ''
2570 dnodes = ''
2571 if entry.get('divergentnodes'):
2571 if entry.get('divergentnodes'):
2572 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2572 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2573 for ctx in entry['divergentnodes']) + ' '
2573 for ctx in entry['divergentnodes']) + ' '
2574 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2574 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2575 entry['reason'], entry['node']))
2575 entry['reason'], entry['node']))
2576
2576
2577 @command('debugwireargs',
2577 @command('debugwireargs',
2578 [('', 'three', '', 'three'),
2578 [('', 'three', '', 'three'),
2579 ('', 'four', '', 'four'),
2579 ('', 'four', '', 'four'),
2580 ('', 'five', '', 'five'),
2580 ('', 'five', '', 'five'),
2581 ] + cmdutil.remoteopts,
2581 ] + cmdutil.remoteopts,
2582 _('REPO [OPTIONS]... [ONE [TWO]]'),
2582 _('REPO [OPTIONS]... [ONE [TWO]]'),
2583 norepo=True)
2583 norepo=True)
2584 def debugwireargs(ui, repopath, *vals, **opts):
2584 def debugwireargs(ui, repopath, *vals, **opts):
2585 opts = pycompat.byteskwargs(opts)
2585 opts = pycompat.byteskwargs(opts)
2586 repo = hg.peer(ui, opts, repopath)
2586 repo = hg.peer(ui, opts, repopath)
2587 for opt in cmdutil.remoteopts:
2587 for opt in cmdutil.remoteopts:
2588 del opts[opt[1]]
2588 del opts[opt[1]]
2589 args = {}
2589 args = {}
2590 for k, v in opts.iteritems():
2590 for k, v in opts.iteritems():
2591 if v:
2591 if v:
2592 args[k] = v
2592 args[k] = v
2593 args = pycompat.strkwargs(args)
2593 args = pycompat.strkwargs(args)
2594 # run twice to check that we don't mess up the stream for the next command
2594 # run twice to check that we don't mess up the stream for the next command
2595 res1 = repo.debugwireargs(*vals, **args)
2595 res1 = repo.debugwireargs(*vals, **args)
2596 res2 = repo.debugwireargs(*vals, **args)
2596 res2 = repo.debugwireargs(*vals, **args)
2597 ui.write("%s\n" % res1)
2597 ui.write("%s\n" % res1)
2598 if res1 != res2:
2598 if res1 != res2:
2599 ui.warn("%s\n" % res2)
2599 ui.warn("%s\n" % res2)
2600
2600
2601 def _parsewirelangblocks(fh):
2601 def _parsewirelangblocks(fh):
2602 activeaction = None
2602 activeaction = None
2603 blocklines = []
2603 blocklines = []
2604
2604
2605 for line in fh:
2605 for line in fh:
2606 line = line.rstrip()
2606 line = line.rstrip()
2607 if not line:
2607 if not line:
2608 continue
2608 continue
2609
2609
2610 if line.startswith(b'#'):
2610 if line.startswith(b'#'):
2611 continue
2611 continue
2612
2612
2613 if not line.startswith(' '):
2613 if not line.startswith(' '):
2614 # New block. Flush previous one.
2614 # New block. Flush previous one.
2615 if activeaction:
2615 if activeaction:
2616 yield activeaction, blocklines
2616 yield activeaction, blocklines
2617
2617
2618 activeaction = line
2618 activeaction = line
2619 blocklines = []
2619 blocklines = []
2620 continue
2620 continue
2621
2621
2622 # Else we start with an indent.
2622 # Else we start with an indent.
2623
2623
2624 if not activeaction:
2624 if not activeaction:
2625 raise error.Abort(_('indented line outside of block'))
2625 raise error.Abort(_('indented line outside of block'))
2626
2626
2627 blocklines.append(line)
2627 blocklines.append(line)
2628
2628
2629 # Flush last block.
2629 # Flush last block.
2630 if activeaction:
2630 if activeaction:
2631 yield activeaction, blocklines
2631 yield activeaction, blocklines
2632
2632
2633 @command('debugwireproto',
2633 @command('debugwireproto',
2634 [
2634 [
2635 ('', 'localssh', False, _('start an SSH server for this repo')),
2635 ('', 'localssh', False, _('start an SSH server for this repo')),
2636 ('', 'peer', '', _('construct a specific version of the peer')),
2636 ('', 'peer', '', _('construct a specific version of the peer')),
2637 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2637 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2638 ('', 'nologhandshake', False,
2638 ('', 'nologhandshake', False,
2639 _('do not log I/O related to the peer handshake')),
2639 _('do not log I/O related to the peer handshake')),
2640 ] + cmdutil.remoteopts,
2640 ] + cmdutil.remoteopts,
2641 _('[PATH]'),
2641 _('[PATH]'),
2642 optionalrepo=True)
2642 optionalrepo=True)
2643 def debugwireproto(ui, repo, path=None, **opts):
2643 def debugwireproto(ui, repo, path=None, **opts):
2644 """send wire protocol commands to a server
2644 """send wire protocol commands to a server
2645
2645
2646 This command can be used to issue wire protocol commands to remote
2646 This command can be used to issue wire protocol commands to remote
2647 peers and to debug the raw data being exchanged.
2647 peers and to debug the raw data being exchanged.
2648
2648
2649 ``--localssh`` will start an SSH server against the current repository
2649 ``--localssh`` will start an SSH server against the current repository
2650 and connect to that. By default, the connection will perform a handshake
2650 and connect to that. By default, the connection will perform a handshake
2651 and establish an appropriate peer instance.
2651 and establish an appropriate peer instance.
2652
2652
2653 ``--peer`` can be used to bypass the handshake protocol and construct a
2653 ``--peer`` can be used to bypass the handshake protocol and construct a
2654 peer instance using the specified class type. Valid values are ``raw``,
2654 peer instance using the specified class type. Valid values are ``raw``,
2655 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2655 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2656 raw data payloads and don't support higher-level command actions.
2656 raw data payloads and don't support higher-level command actions.
2657
2657
2658 ``--noreadstderr`` can be used to disable automatic reading from stderr
2658 ``--noreadstderr`` can be used to disable automatic reading from stderr
2659 of the peer (for SSH connections only). Disabling automatic reading of
2659 of the peer (for SSH connections only). Disabling automatic reading of
2660 stderr is useful for making output more deterministic.
2660 stderr is useful for making output more deterministic.
2661
2661
2662 Commands are issued via a mini language which is specified via stdin.
2662 Commands are issued via a mini language which is specified via stdin.
2663 The language consists of individual actions to perform. An action is
2663 The language consists of individual actions to perform. An action is
2664 defined by a block. A block is defined as a line with no leading
2664 defined by a block. A block is defined as a line with no leading
2665 space followed by 0 or more lines with leading space. Blocks are
2665 space followed by 0 or more lines with leading space. Blocks are
2666 effectively a high-level command with additional metadata.
2666 effectively a high-level command with additional metadata.
2667
2667
2668 Lines beginning with ``#`` are ignored.
2668 Lines beginning with ``#`` are ignored.
2669
2669
2670 The following sections denote available actions.
2670 The following sections denote available actions.
2671
2671
2672 raw
2672 raw
2673 ---
2673 ---
2674
2674
2675 Send raw data to the server.
2675 Send raw data to the server.
2676
2676
2677 The block payload contains the raw data to send as one atomic send
2677 The block payload contains the raw data to send as one atomic send
2678 operation. The data may not actually be delivered in a single system
2678 operation. The data may not actually be delivered in a single system
2679 call: it depends on the abilities of the transport being used.
2679 call: it depends on the abilities of the transport being used.
2680
2680
2681 Each line in the block is de-indented and concatenated. Then, that
2681 Each line in the block is de-indented and concatenated. Then, that
2682 value is evaluated as a Python b'' literal. This allows the use of
2682 value is evaluated as a Python b'' literal. This allows the use of
2683 backslash escaping, etc.
2683 backslash escaping, etc.
2684
2684
2685 raw+
2685 raw+
2686 ----
2686 ----
2687
2687
2688 Behaves like ``raw`` except flushes output afterwards.
2688 Behaves like ``raw`` except flushes output afterwards.
2689
2689
2690 command <X>
2690 command <X>
2691 -----------
2691 -----------
2692
2692
2693 Send a request to run a named command, whose name follows the ``command``
2693 Send a request to run a named command, whose name follows the ``command``
2694 string.
2694 string.
2695
2695
2696 Arguments to the command are defined as lines in this block. The format of
2696 Arguments to the command are defined as lines in this block. The format of
2697 each line is ``<key> <value>``. e.g.::
2697 each line is ``<key> <value>``. e.g.::
2698
2698
2699 command listkeys
2699 command listkeys
2700 namespace bookmarks
2700 namespace bookmarks
2701
2701
2702 If the value begins with ``eval:``, it will be interpreted as a Python
2702 If the value begins with ``eval:``, it will be interpreted as a Python
2703 literal expression. Otherwise values are interpreted as Python b'' literals.
2703 literal expression. Otherwise values are interpreted as Python b'' literals.
2704 This allows sending complex types and encoding special byte sequences via
2704 This allows sending complex types and encoding special byte sequences via
2705 backslash escaping.
2705 backslash escaping.
2706
2706
2707 The following arguments have special meaning:
2707 The following arguments have special meaning:
2708
2708
2709 ``PUSHFILE``
2709 ``PUSHFILE``
2710 When defined, the *push* mechanism of the peer will be used instead
2710 When defined, the *push* mechanism of the peer will be used instead
2711 of the static request-response mechanism and the content of the
2711 of the static request-response mechanism and the content of the
2712 file specified in the value of this argument will be sent as the
2712 file specified in the value of this argument will be sent as the
2713 command payload.
2713 command payload.
2714
2714
2715 This can be used to submit a local bundle file to the remote.
2715 This can be used to submit a local bundle file to the remote.
2716
2716
2717 batchbegin
2717 batchbegin
2718 ----------
2718 ----------
2719
2719
2720 Instruct the peer to begin a batched send.
2720 Instruct the peer to begin a batched send.
2721
2721
2722 All ``command`` blocks are queued for execution until the next
2722 All ``command`` blocks are queued for execution until the next
2723 ``batchsubmit`` block.
2723 ``batchsubmit`` block.
2724
2724
2725 batchsubmit
2725 batchsubmit
2726 -----------
2726 -----------
2727
2727
2728 Submit previously queued ``command`` blocks as a batch request.
2728 Submit previously queued ``command`` blocks as a batch request.
2729
2729
2730 This action MUST be paired with a ``batchbegin`` action.
2730 This action MUST be paired with a ``batchbegin`` action.
2731
2731
2732 httprequest <method> <path>
2732 httprequest <method> <path>
2733 ---------------------------
2733 ---------------------------
2734
2734
2735 (HTTP peer only)
2735 (HTTP peer only)
2736
2736
2737 Send an HTTP request to the peer.
2737 Send an HTTP request to the peer.
2738
2738
2739 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2739 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2740
2740
2741 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2741 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2742 headers to add to the request. e.g. ``Accept: foo``.
2742 headers to add to the request. e.g. ``Accept: foo``.
2743
2743
2744 The following arguments are special:
2744 The following arguments are special:
2745
2745
2746 ``BODYFILE``
2746 ``BODYFILE``
2747 The content of the file defined as the value to this argument will be
2747 The content of the file defined as the value to this argument will be
2748 transferred verbatim as the HTTP request body.
2748 transferred verbatim as the HTTP request body.
2749
2749
2750 ``frame <type> <flags> <payload>``
2750 ``frame <type> <flags> <payload>``
2751 Send a unified protocol frame as part of the request body.
2751 Send a unified protocol frame as part of the request body.
2752
2752
2753 All frames will be collected and sent as the body to the HTTP
2753 All frames will be collected and sent as the body to the HTTP
2754 request.
2754 request.
2755
2755
2756 close
2756 close
2757 -----
2757 -----
2758
2758
2759 Close the connection to the server.
2759 Close the connection to the server.
2760
2760
2761 flush
2761 flush
2762 -----
2762 -----
2763
2763
2764 Flush data written to the server.
2764 Flush data written to the server.
2765
2765
2766 readavailable
2766 readavailable
2767 -------------
2767 -------------
2768
2768
2769 Close the write end of the connection and read all available data from
2769 Close the write end of the connection and read all available data from
2770 the server.
2770 the server.
2771
2771
2772 If the connection to the server encompasses multiple pipes, we poll both
2772 If the connection to the server encompasses multiple pipes, we poll both
2773 pipes and read available data.
2773 pipes and read available data.
2774
2774
2775 readline
2775 readline
2776 --------
2776 --------
2777
2777
2778 Read a line of output from the server. If there are multiple output
2778 Read a line of output from the server. If there are multiple output
2779 pipes, reads only the main pipe.
2779 pipes, reads only the main pipe.
2780
2780
2781 ereadline
2781 ereadline
2782 ---------
2782 ---------
2783
2783
2784 Like ``readline``, but read from the stderr pipe, if available.
2784 Like ``readline``, but read from the stderr pipe, if available.
2785
2785
2786 read <X>
2786 read <X>
2787 --------
2787 --------
2788
2788
2789 ``read()`` N bytes from the server's main output pipe.
2789 ``read()`` N bytes from the server's main output pipe.
2790
2790
2791 eread <X>
2791 eread <X>
2792 ---------
2792 ---------
2793
2793
2794 ``read()`` N bytes from the server's stderr pipe, if available.
2794 ``read()`` N bytes from the server's stderr pipe, if available.
2795
2795
2796 Specifying Unified Frame-Based Protocol Frames
2796 Specifying Unified Frame-Based Protocol Frames
2797 ----------------------------------------------
2797 ----------------------------------------------
2798
2798
2799 It is possible to emit a *Unified Frame-Based Protocol* by using special
2799 It is possible to emit a *Unified Frame-Based Protocol* by using special
2800 syntax.
2800 syntax.
2801
2801
2802 A frame is composed as a type, flags, and payload. These can be parsed
2802 A frame is composed as a type, flags, and payload. These can be parsed
2803 from a string of the form:
2803 from a string of the form:
2804
2804
2805 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2805 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2806
2806
2807 ``request-id`` and ``stream-id`` are integers defining the request and
2807 ``request-id`` and ``stream-id`` are integers defining the request and
2808 stream identifiers.
2808 stream identifiers.
2809
2809
2810 ``type`` can be an integer value for the frame type or the string name
2810 ``type`` can be an integer value for the frame type or the string name
2811 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2811 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2812 ``command-name``.
2812 ``command-name``.
2813
2813
2814 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2814 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2815 components. Each component (and there can be just one) can be an integer
2815 components. Each component (and there can be just one) can be an integer
2816 or a flag name for stream flags or frame flags, respectively. Values are
2816 or a flag name for stream flags or frame flags, respectively. Values are
2817 resolved to integers and then bitwise OR'd together.
2817 resolved to integers and then bitwise OR'd together.
2818
2818
2819 ``payload`` represents the raw frame payload. If it begins with
2819 ``payload`` represents the raw frame payload. If it begins with
2820 ``cbor:``, the following string is evaluated as Python code and the
2820 ``cbor:``, the following string is evaluated as Python code and the
2821 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2821 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2822 as a Python byte string literal.
2822 as a Python byte string literal.
2823 """
2823 """
2824 opts = pycompat.byteskwargs(opts)
2824 opts = pycompat.byteskwargs(opts)
2825
2825
2826 if opts['localssh'] and not repo:
2826 if opts['localssh'] and not repo:
2827 raise error.Abort(_('--localssh requires a repository'))
2827 raise error.Abort(_('--localssh requires a repository'))
2828
2828
2829 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2829 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2830 raise error.Abort(_('invalid value for --peer'),
2830 raise error.Abort(_('invalid value for --peer'),
2831 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2831 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2832
2832
2833 if path and opts['localssh']:
2833 if path and opts['localssh']:
2834 raise error.Abort(_('cannot specify --localssh with an explicit '
2834 raise error.Abort(_('cannot specify --localssh with an explicit '
2835 'path'))
2835 'path'))
2836
2836
2837 if ui.interactive():
2837 if ui.interactive():
2838 ui.write(_('(waiting for commands on stdin)\n'))
2838 ui.write(_('(waiting for commands on stdin)\n'))
2839
2839
2840 blocks = list(_parsewirelangblocks(ui.fin))
2840 blocks = list(_parsewirelangblocks(ui.fin))
2841
2841
2842 proc = None
2842 proc = None
2843 stdin = None
2843 stdin = None
2844 stdout = None
2844 stdout = None
2845 stderr = None
2845 stderr = None
2846 opener = None
2846 opener = None
2847
2847
2848 if opts['localssh']:
2848 if opts['localssh']:
2849 # We start the SSH server in its own process so there is process
2849 # We start the SSH server in its own process so there is process
2850 # separation. This prevents a whole class of potential bugs around
2850 # separation. This prevents a whole class of potential bugs around
2851 # shared state from interfering with server operation.
2851 # shared state from interfering with server operation.
2852 args = procutil.hgcmd() + [
2852 args = procutil.hgcmd() + [
2853 '-R', repo.root,
2853 '-R', repo.root,
2854 'debugserve', '--sshstdio',
2854 'debugserve', '--sshstdio',
2855 ]
2855 ]
2856 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2856 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2857 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2857 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2858 bufsize=0)
2858 bufsize=0)
2859
2859
2860 stdin = proc.stdin
2860 stdin = proc.stdin
2861 stdout = proc.stdout
2861 stdout = proc.stdout
2862 stderr = proc.stderr
2862 stderr = proc.stderr
2863
2863
2864 # We turn the pipes into observers so we can log I/O.
2864 # We turn the pipes into observers so we can log I/O.
2865 if ui.verbose or opts['peer'] == 'raw':
2865 if ui.verbose or opts['peer'] == 'raw':
2866 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2866 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2867 logdata=True)
2867 logdata=True)
2868 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2868 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2869 logdata=True)
2869 logdata=True)
2870 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2870 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2871 logdata=True)
2871 logdata=True)
2872
2872
2873 # --localssh also implies the peer connection settings.
2873 # --localssh also implies the peer connection settings.
2874
2874
2875 url = 'ssh://localserver'
2875 url = 'ssh://localserver'
2876 autoreadstderr = not opts['noreadstderr']
2876 autoreadstderr = not opts['noreadstderr']
2877
2877
2878 if opts['peer'] == 'ssh1':
2878 if opts['peer'] == 'ssh1':
2879 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2879 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2880 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2880 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2881 None, autoreadstderr=autoreadstderr)
2881 None, autoreadstderr=autoreadstderr)
2882 elif opts['peer'] == 'ssh2':
2882 elif opts['peer'] == 'ssh2':
2883 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2883 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2884 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2884 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2885 None, autoreadstderr=autoreadstderr)
2885 None, autoreadstderr=autoreadstderr)
2886 elif opts['peer'] == 'raw':
2886 elif opts['peer'] == 'raw':
2887 ui.write(_('using raw connection to peer\n'))
2887 ui.write(_('using raw connection to peer\n'))
2888 peer = None
2888 peer = None
2889 else:
2889 else:
2890 ui.write(_('creating ssh peer from handshake results\n'))
2890 ui.write(_('creating ssh peer from handshake results\n'))
2891 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2891 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2892 autoreadstderr=autoreadstderr)
2892 autoreadstderr=autoreadstderr)
2893
2893
2894 elif path:
2894 elif path:
2895 # We bypass hg.peer() so we can proxy the sockets.
2895 # We bypass hg.peer() so we can proxy the sockets.
2896 # TODO consider not doing this because we skip
2896 # TODO consider not doing this because we skip
2897 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2897 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2898 u = util.url(path)
2898 u = util.url(path)
2899 if u.scheme != 'http':
2899 if u.scheme != 'http':
2900 raise error.Abort(_('only http:// paths are currently supported'))
2900 raise error.Abort(_('only http:// paths are currently supported'))
2901
2901
2902 url, authinfo = u.authinfo()
2902 url, authinfo = u.authinfo()
2903 openerargs = {
2903 openerargs = {
2904 r'useragent': b'Mercurial debugwireproto',
2904 r'useragent': b'Mercurial debugwireproto',
2905 }
2905 }
2906
2906
2907 # Turn pipes/sockets into observers so we can log I/O.
2907 # Turn pipes/sockets into observers so we can log I/O.
2908 if ui.verbose:
2908 if ui.verbose:
2909 openerargs.update({
2909 openerargs.update({
2910 r'loggingfh': ui,
2910 r'loggingfh': ui,
2911 r'loggingname': b's',
2911 r'loggingname': b's',
2912 r'loggingopts': {
2912 r'loggingopts': {
2913 r'logdata': True,
2913 r'logdata': True,
2914 r'logdataapis': False,
2914 r'logdataapis': False,
2915 },
2915 },
2916 })
2916 })
2917
2917
2918 if ui.debugflag:
2918 if ui.debugflag:
2919 openerargs[r'loggingopts'][r'logdataapis'] = True
2919 openerargs[r'loggingopts'][r'logdataapis'] = True
2920
2920
2921 # Don't send default headers when in raw mode. This allows us to
2921 # Don't send default headers when in raw mode. This allows us to
2922 # bypass most of the behavior of our URL handling code so we can
2922 # bypass most of the behavior of our URL handling code so we can
2923 # have near complete control over what's sent on the wire.
2923 # have near complete control over what's sent on the wire.
2924 if opts['peer'] == 'raw':
2924 if opts['peer'] == 'raw':
2925 openerargs[r'sendaccept'] = False
2925 openerargs[r'sendaccept'] = False
2926
2926
2927 opener = urlmod.opener(ui, authinfo, **openerargs)
2927 opener = urlmod.opener(ui, authinfo, **openerargs)
2928
2928
2929 if opts['peer'] == 'http2':
2929 if opts['peer'] == 'http2':
2930 ui.write(_('creating http peer for wire protocol version 2\n'))
2930 ui.write(_('creating http peer for wire protocol version 2\n'))
2931 # We go through makepeer() because we need an API descriptor for
2931 # We go through makepeer() because we need an API descriptor for
2932 # the peer instance to be useful.
2932 # the peer instance to be useful.
2933 with ui.configoverride({
2933 with ui.configoverride({
2934 ('experimental', 'httppeer.advertise-v2'): True}):
2934 ('experimental', 'httppeer.advertise-v2'): True}):
2935 if opts['nologhandshake']:
2935 if opts['nologhandshake']:
2936 ui.pushbuffer()
2936 ui.pushbuffer()
2937
2937
2938 peer = httppeer.makepeer(ui, path, opener=opener)
2938 peer = httppeer.makepeer(ui, path, opener=opener)
2939
2939
2940 if opts['nologhandshake']:
2940 if opts['nologhandshake']:
2941 ui.popbuffer()
2941 ui.popbuffer()
2942
2942
2943 if not isinstance(peer, httppeer.httpv2peer):
2943 if not isinstance(peer, httppeer.httpv2peer):
2944 raise error.Abort(_('could not instantiate HTTP peer for '
2944 raise error.Abort(_('could not instantiate HTTP peer for '
2945 'wire protocol version 2'),
2945 'wire protocol version 2'),
2946 hint=_('the server may not have the feature '
2946 hint=_('the server may not have the feature '
2947 'enabled or is not allowing this '
2947 'enabled or is not allowing this '
2948 'client version'))
2948 'client version'))
2949
2949
2950 elif opts['peer'] == 'raw':
2950 elif opts['peer'] == 'raw':
2951 ui.write(_('using raw connection to peer\n'))
2951 ui.write(_('using raw connection to peer\n'))
2952 peer = None
2952 peer = None
2953 elif opts['peer']:
2953 elif opts['peer']:
2954 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2954 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2955 opts['peer'])
2955 opts['peer'])
2956 else:
2956 else:
2957 peer = httppeer.makepeer(ui, path, opener=opener)
2957 peer = httppeer.makepeer(ui, path, opener=opener)
2958
2958
2959 # We /could/ populate stdin/stdout with sock.makefile()...
2959 # We /could/ populate stdin/stdout with sock.makefile()...
2960 else:
2960 else:
2961 raise error.Abort(_('unsupported connection configuration'))
2961 raise error.Abort(_('unsupported connection configuration'))
2962
2962
2963 batchedcommands = None
2963 batchedcommands = None
2964
2964
2965 # Now perform actions based on the parsed wire language instructions.
2965 # Now perform actions based on the parsed wire language instructions.
2966 for action, lines in blocks:
2966 for action, lines in blocks:
2967 if action in ('raw', 'raw+'):
2967 if action in ('raw', 'raw+'):
2968 if not stdin:
2968 if not stdin:
2969 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2969 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2970
2970
2971 # Concatenate the data together.
2971 # Concatenate the data together.
2972 data = ''.join(l.lstrip() for l in lines)
2972 data = ''.join(l.lstrip() for l in lines)
2973 data = stringutil.unescapestr(data)
2973 data = stringutil.unescapestr(data)
2974 stdin.write(data)
2974 stdin.write(data)
2975
2975
2976 if action == 'raw+':
2976 if action == 'raw+':
2977 stdin.flush()
2977 stdin.flush()
2978 elif action == 'flush':
2978 elif action == 'flush':
2979 if not stdin:
2979 if not stdin:
2980 raise error.Abort(_('cannot call flush on this peer'))
2980 raise error.Abort(_('cannot call flush on this peer'))
2981 stdin.flush()
2981 stdin.flush()
2982 elif action.startswith('command'):
2982 elif action.startswith('command'):
2983 if not peer:
2983 if not peer:
2984 raise error.Abort(_('cannot send commands unless peer instance '
2984 raise error.Abort(_('cannot send commands unless peer instance '
2985 'is available'))
2985 'is available'))
2986
2986
2987 command = action.split(' ', 1)[1]
2987 command = action.split(' ', 1)[1]
2988
2988
2989 args = {}
2989 args = {}
2990 for line in lines:
2990 for line in lines:
2991 # We need to allow empty values.
2991 # We need to allow empty values.
2992 fields = line.lstrip().split(' ', 1)
2992 fields = line.lstrip().split(' ', 1)
2993 if len(fields) == 1:
2993 if len(fields) == 1:
2994 key = fields[0]
2994 key = fields[0]
2995 value = ''
2995 value = ''
2996 else:
2996 else:
2997 key, value = fields
2997 key, value = fields
2998
2998
2999 if value.startswith('eval:'):
2999 if value.startswith('eval:'):
3000 value = stringutil.evalpythonliteral(value[5:])
3000 value = stringutil.evalpythonliteral(value[5:])
3001 else:
3001 else:
3002 value = stringutil.unescapestr(value)
3002 value = stringutil.unescapestr(value)
3003
3003
3004 args[key] = value
3004 args[key] = value
3005
3005
3006 if batchedcommands is not None:
3006 if batchedcommands is not None:
3007 batchedcommands.append((command, args))
3007 batchedcommands.append((command, args))
3008 continue
3008 continue
3009
3009
3010 ui.status(_('sending %s command\n') % command)
3010 ui.status(_('sending %s command\n') % command)
3011
3011
3012 if 'PUSHFILE' in args:
3012 if 'PUSHFILE' in args:
3013 with open(args['PUSHFILE'], r'rb') as fh:
3013 with open(args['PUSHFILE'], r'rb') as fh:
3014 del args['PUSHFILE']
3014 del args['PUSHFILE']
3015 res, output = peer._callpush(command, fh,
3015 res, output = peer._callpush(command, fh,
3016 **pycompat.strkwargs(args))
3016 **pycompat.strkwargs(args))
3017 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3017 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3018 ui.status(_('remote output: %s\n') %
3018 ui.status(_('remote output: %s\n') %
3019 stringutil.escapestr(output))
3019 stringutil.escapestr(output))
3020 else:
3020 else:
3021 with peer.commandexecutor() as e:
3021 with peer.commandexecutor() as e:
3022 res = e.callcommand(command, args).result()
3022 res = e.callcommand(command, args).result()
3023
3023
3024 if isinstance(res, wireprotov2peer.commandresponse):
3024 if isinstance(res, wireprotov2peer.commandresponse):
3025 val = list(res.cborobjects())
3025 val = list(res.cborobjects())
3026 ui.status(_('response: %s\n') %
3026 ui.status(_('response: %s\n') %
3027 stringutil.pprint(val, bprefix=True))
3027 stringutil.pprint(val, bprefix=True))
3028
3028
3029 else:
3029 else:
3030 ui.status(_('response: %s\n') %
3030 ui.status(_('response: %s\n') %
3031 stringutil.pprint(res, bprefix=True))
3031 stringutil.pprint(res, bprefix=True))
3032
3032
3033 elif action == 'batchbegin':
3033 elif action == 'batchbegin':
3034 if batchedcommands is not None:
3034 if batchedcommands is not None:
3035 raise error.Abort(_('nested batchbegin not allowed'))
3035 raise error.Abort(_('nested batchbegin not allowed'))
3036
3036
3037 batchedcommands = []
3037 batchedcommands = []
3038 elif action == 'batchsubmit':
3038 elif action == 'batchsubmit':
3039 # There is a batching API we could go through. But it would be
3039 # There is a batching API we could go through. But it would be
3040 # difficult to normalize requests into function calls. It is easier
3040 # difficult to normalize requests into function calls. It is easier
3041 # to bypass this layer and normalize to commands + args.
3041 # to bypass this layer and normalize to commands + args.
3042 ui.status(_('sending batch with %d sub-commands\n') %
3042 ui.status(_('sending batch with %d sub-commands\n') %
3043 len(batchedcommands))
3043 len(batchedcommands))
3044 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3044 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3045 ui.status(_('response #%d: %s\n') %
3045 ui.status(_('response #%d: %s\n') %
3046 (i, stringutil.escapestr(chunk)))
3046 (i, stringutil.escapestr(chunk)))
3047
3047
3048 batchedcommands = None
3048 batchedcommands = None
3049
3049
3050 elif action.startswith('httprequest '):
3050 elif action.startswith('httprequest '):
3051 if not opener:
3051 if not opener:
3052 raise error.Abort(_('cannot use httprequest without an HTTP '
3052 raise error.Abort(_('cannot use httprequest without an HTTP '
3053 'peer'))
3053 'peer'))
3054
3054
3055 request = action.split(' ', 2)
3055 request = action.split(' ', 2)
3056 if len(request) != 3:
3056 if len(request) != 3:
3057 raise error.Abort(_('invalid httprequest: expected format is '
3057 raise error.Abort(_('invalid httprequest: expected format is '
3058 '"httprequest <method> <path>'))
3058 '"httprequest <method> <path>'))
3059
3059
3060 method, httppath = request[1:]
3060 method, httppath = request[1:]
3061 headers = {}
3061 headers = {}
3062 body = None
3062 body = None
3063 frames = []
3063 frames = []
3064 for line in lines:
3064 for line in lines:
3065 line = line.lstrip()
3065 line = line.lstrip()
3066 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3066 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3067 if m:
3067 if m:
3068 headers[m.group(1)] = m.group(2)
3068 headers[m.group(1)] = m.group(2)
3069 continue
3069 continue
3070
3070
3071 if line.startswith(b'BODYFILE '):
3071 if line.startswith(b'BODYFILE '):
3072 with open(line.split(b' ', 1), 'rb') as fh:
3072 with open(line.split(b' ', 1), 'rb') as fh:
3073 body = fh.read()
3073 body = fh.read()
3074 elif line.startswith(b'frame '):
3074 elif line.startswith(b'frame '):
3075 frame = wireprotoframing.makeframefromhumanstring(
3075 frame = wireprotoframing.makeframefromhumanstring(
3076 line[len(b'frame '):])
3076 line[len(b'frame '):])
3077
3077
3078 frames.append(frame)
3078 frames.append(frame)
3079 else:
3079 else:
3080 raise error.Abort(_('unknown argument to httprequest: %s') %
3080 raise error.Abort(_('unknown argument to httprequest: %s') %
3081 line)
3081 line)
3082
3082
3083 url = path + httppath
3083 url = path + httppath
3084
3084
3085 if frames:
3085 if frames:
3086 body = b''.join(bytes(f) for f in frames)
3086 body = b''.join(bytes(f) for f in frames)
3087
3087
3088 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3088 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3089
3089
3090 # urllib.Request insists on using has_data() as a proxy for
3090 # urllib.Request insists on using has_data() as a proxy for
3091 # determining the request method. Override that to use our
3091 # determining the request method. Override that to use our
3092 # explicitly requested method.
3092 # explicitly requested method.
3093 req.get_method = lambda: method
3093 req.get_method = lambda: method
3094
3094
3095 try:
3095 try:
3096 res = opener.open(req)
3096 res = opener.open(req)
3097 body = res.read()
3097 body = res.read()
3098 except util.urlerr.urlerror as e:
3098 except util.urlerr.urlerror as e:
3099 e.read()
3099 e.read()
3100 continue
3100 continue
3101
3101
3102 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3102 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3103 ui.write(_('cbor> %s\n') %
3103 ui.write(_('cbor> %s\n') %
3104 stringutil.pprint(cbor.loads(body), bprefix=True))
3104 stringutil.pprint(cbor.loads(body), bprefix=True))
3105
3105
3106 elif action == 'close':
3106 elif action == 'close':
3107 peer.close()
3107 peer.close()
3108 elif action == 'readavailable':
3108 elif action == 'readavailable':
3109 if not stdout or not stderr:
3109 if not stdout or not stderr:
3110 raise error.Abort(_('readavailable not available on this peer'))
3110 raise error.Abort(_('readavailable not available on this peer'))
3111
3111
3112 stdin.close()
3112 stdin.close()
3113 stdout.read()
3113 stdout.read()
3114 stderr.read()
3114 stderr.read()
3115
3115
3116 elif action == 'readline':
3116 elif action == 'readline':
3117 if not stdout:
3117 if not stdout:
3118 raise error.Abort(_('readline not available on this peer'))
3118 raise error.Abort(_('readline not available on this peer'))
3119 stdout.readline()
3119 stdout.readline()
3120 elif action == 'ereadline':
3120 elif action == 'ereadline':
3121 if not stderr:
3121 if not stderr:
3122 raise error.Abort(_('ereadline not available on this peer'))
3122 raise error.Abort(_('ereadline not available on this peer'))
3123 stderr.readline()
3123 stderr.readline()
3124 elif action.startswith('read '):
3124 elif action.startswith('read '):
3125 count = int(action.split(' ', 1)[1])
3125 count = int(action.split(' ', 1)[1])
3126 if not stdout:
3126 if not stdout:
3127 raise error.Abort(_('read not available on this peer'))
3127 raise error.Abort(_('read not available on this peer'))
3128 stdout.read(count)
3128 stdout.read(count)
3129 elif action.startswith('eread '):
3129 elif action.startswith('eread '):
3130 count = int(action.split(' ', 1)[1])
3130 count = int(action.split(' ', 1)[1])
3131 if not stderr:
3131 if not stderr:
3132 raise error.Abort(_('eread not available on this peer'))
3132 raise error.Abort(_('eread not available on this peer'))
3133 stderr.read(count)
3133 stderr.read(count)
3134 else:
3134 else:
3135 raise error.Abort(_('unknown action: %s') % action)
3135 raise error.Abort(_('unknown action: %s') % action)
3136
3136
3137 if batchedcommands is not None:
3137 if batchedcommands is not None:
3138 raise error.Abort(_('unclosed "batchbegin" request'))
3138 raise error.Abort(_('unclosed "batchbegin" request'))
3139
3139
3140 if peer:
3140 if peer:
3141 peer.close()
3141 peer.close()
3142
3142
3143 if proc:
3143 if proc:
3144 proc.kill()
3144 proc.kill()
@@ -1,676 +1,676 b''
1 $ fileset() {
1 $ fileset() {
2 > hg debugfileset "$@"
2 > hg debugfileset "$@"
3 > }
3 > }
4
4
5 $ hg init repo
5 $ hg init repo
6 $ cd repo
6 $ cd repo
7 $ echo a > a1
7 $ echo a > a1
8 $ echo a > a2
8 $ echo a > a2
9 $ echo b > b1
9 $ echo b > b1
10 $ echo b > b2
10 $ echo b > b2
11 $ hg ci -Am addfiles
11 $ hg ci -Am addfiles
12 adding a1
12 adding a1
13 adding a2
13 adding a2
14 adding b1
14 adding b1
15 adding b2
15 adding b2
16
16
17 Test operators and basic patterns
17 Test operators and basic patterns
18
18
19 $ fileset -v a1
19 $ fileset -v a1
20 (symbol 'a1')
20 (symbol 'a1')
21 a1
21 a1
22 $ fileset -v 'a*'
22 $ fileset -v 'a*'
23 (symbol 'a*')
23 (symbol 'a*')
24 a1
24 a1
25 a2
25 a2
26 $ fileset -v '"re:a\d"'
26 $ fileset -v '"re:a\d"'
27 (string 're:a\\d')
27 (string 're:a\\d')
28 a1
28 a1
29 a2
29 a2
30 $ fileset -v '!re:"a\d"'
30 $ fileset -v '!re:"a\d"'
31 (not
31 (not
32 (kindpat
32 (kindpat
33 (symbol 're')
33 (symbol 're')
34 (string 'a\\d')))
34 (string 'a\\d')))
35 b1
35 b1
36 b2
36 b2
37 $ fileset -v 'path:a1 or glob:b?'
37 $ fileset -v 'path:a1 or glob:b?'
38 (or
38 (or
39 (kindpat
39 (kindpat
40 (symbol 'path')
40 (symbol 'path')
41 (symbol 'a1'))
41 (symbol 'a1'))
42 (kindpat
42 (kindpat
43 (symbol 'glob')
43 (symbol 'glob')
44 (symbol 'b?')))
44 (symbol 'b?')))
45 a1
45 a1
46 b1
46 b1
47 b2
47 b2
48 $ fileset -v 'a1 or a2'
48 $ fileset -v 'a1 or a2'
49 (or
49 (or
50 (symbol 'a1')
50 (symbol 'a1')
51 (symbol 'a2'))
51 (symbol 'a2'))
52 a1
52 a1
53 a2
53 a2
54 $ fileset 'a1 | a2'
54 $ fileset 'a1 | a2'
55 a1
55 a1
56 a2
56 a2
57 $ fileset 'a* and "*1"'
57 $ fileset 'a* and "*1"'
58 a1
58 a1
59 $ fileset 'a* & "*1"'
59 $ fileset 'a* & "*1"'
60 a1
60 a1
61 $ fileset 'not (r"a*")'
61 $ fileset 'not (r"a*")'
62 b1
62 b1
63 b2
63 b2
64 $ fileset '! ("a*")'
64 $ fileset '! ("a*")'
65 b1
65 b1
66 b2
66 b2
67 $ fileset 'a* - a1'
67 $ fileset 'a* - a1'
68 a2
68 a2
69 $ fileset 'a_b'
69 $ fileset 'a_b'
70 $ fileset '"\xy"'
70 $ fileset '"\xy"'
71 hg: parse error: invalid \x escape* (glob)
71 hg: parse error: invalid \x escape* (glob)
72 [255]
72 [255]
73
73
74 Test invalid syntax
74 Test invalid syntax
75
75
76 $ fileset -v '"added"()'
76 $ fileset -v '"added"()'
77 (func
77 (func
78 (string 'added')
78 (string 'added')
79 None)
79 None)
80 hg: parse error: not a symbol
80 hg: parse error: not a symbol
81 [255]
81 [255]
82 $ fileset -v '()()'
82 $ fileset -v '()()'
83 (func
83 (func
84 (group
84 (group
85 None)
85 None)
86 None)
86 None)
87 hg: parse error: not a symbol
87 hg: parse error: not a symbol
88 [255]
88 [255]
89 $ fileset -v -- '-x'
89 $ fileset -v -- '-x'
90 (negate
90 (negate
91 (symbol 'x'))
91 (symbol 'x'))
92 hg: parse error: can't use negate operator in this context
92 hg: parse error: can't use negate operator in this context
93 [255]
93 [255]
94 $ fileset -v -- '-()'
94 $ fileset -v -- '-()'
95 (negate
95 (negate
96 (group
96 (group
97 None))
97 None))
98 hg: parse error: can't use negate operator in this context
98 hg: parse error: can't use negate operator in this context
99 [255]
99 [255]
100
100
101 $ fileset '"path":.'
101 $ fileset '"path":.'
102 hg: parse error: not a symbol
102 hg: parse error: not a symbol
103 [255]
103 [255]
104 $ fileset 'path:foo bar'
104 $ fileset 'path:foo bar'
105 hg: parse error at 9: invalid token
105 hg: parse error at 9: invalid token
106 [255]
106 [255]
107 $ fileset 'foo:bar:baz'
107 $ fileset 'foo:bar:baz'
108 hg: parse error: not a symbol
108 hg: parse error: not a symbol
109 [255]
109 [255]
110 $ fileset 'foo:bar()'
110 $ fileset 'foo:bar()'
111 hg: parse error: pattern must be a string
111 hg: parse error: pattern must be a string
112 [255]
112 [255]
113 $ fileset 'foo:bar'
113 $ fileset 'foo:bar'
114 hg: parse error: invalid pattern kind: foo
114 hg: parse error: invalid pattern kind: foo
115 [255]
115 [255]
116
116
117 Test files status
117 Test files status
118
118
119 $ rm a1
119 $ rm a1
120 $ hg rm a2
120 $ hg rm a2
121 $ echo b >> b2
121 $ echo b >> b2
122 $ hg cp b1 c1
122 $ hg cp b1 c1
123 $ echo c > c2
123 $ echo c > c2
124 $ echo c > c3
124 $ echo c > c3
125 $ cat > .hgignore <<EOF
125 $ cat > .hgignore <<EOF
126 > \.hgignore
126 > \.hgignore
127 > 2$
127 > 2$
128 > EOF
128 > EOF
129 $ fileset 'modified()'
129 $ fileset 'modified()'
130 b2
130 b2
131 $ fileset 'added()'
131 $ fileset 'added()'
132 c1
132 c1
133 $ fileset 'removed()'
133 $ fileset 'removed()'
134 a2
134 a2
135 $ fileset 'deleted()'
135 $ fileset 'deleted()'
136 a1
136 a1
137 $ fileset 'missing()'
137 $ fileset 'missing()'
138 a1
138 a1
139 $ fileset 'unknown()'
139 $ fileset 'unknown()'
140 c3
140 c3
141 $ fileset 'ignored()'
141 $ fileset 'ignored()'
142 .hgignore
142 .hgignore
143 c2
143 c2
144 $ fileset 'hgignore()'
144 $ fileset 'hgignore()'
145 a2
145 a2
146 b2
146 b2
147 $ fileset 'clean()'
147 $ fileset 'clean()'
148 b1
148 b1
149 $ fileset 'copied()'
149 $ fileset 'copied()'
150 c1
150 c1
151
151
152 Test files status in different revisions
152 Test files status in different revisions
153
153
154 $ hg status -m
154 $ hg status -m
155 M b2
155 M b2
156 $ fileset -r0 'revs("wdir()", modified())' --traceback
156 $ fileset -r0 'revs("wdir()", modified())' --traceback
157 b2
157 b2
158 $ hg status -a
158 $ hg status -a
159 A c1
159 A c1
160 $ fileset -r0 'revs("wdir()", added())'
160 $ fileset -r0 'revs("wdir()", added())'
161 c1
161 c1
162 $ hg status --change 0 -a
162 $ hg status --change 0 -a
163 A a1
163 A a1
164 A a2
164 A a2
165 A b1
165 A b1
166 A b2
166 A b2
167 $ hg status -mru
167 $ hg status -mru
168 M b2
168 M b2
169 R a2
169 R a2
170 ? c3
170 ? c3
171 $ fileset -r0 'added() and revs("wdir()", modified() or removed() or unknown())'
171 $ fileset -r0 'added() and revs("wdir()", modified() or removed() or unknown())'
172 a2
172 b2
173 b2
173 a2
174 $ fileset -r0 'added() or revs("wdir()", added())'
174 $ fileset -r0 'added() or revs("wdir()", added())'
175 a1
175 a1
176 a2
176 a2
177 b1
177 b1
178 b2
178 b2
179 c1
179 c1
180
180
181 Test files properties
181 Test files properties
182
182
183 >>> open('bin', 'wb').write(b'\0a') and None
183 >>> open('bin', 'wb').write(b'\0a') and None
184 $ fileset 'binary()'
184 $ fileset 'binary()'
185 $ fileset 'binary() and unknown()'
185 $ fileset 'binary() and unknown()'
186 bin
186 bin
187 $ echo '^bin$' >> .hgignore
187 $ echo '^bin$' >> .hgignore
188 $ fileset 'binary() and ignored()'
188 $ fileset 'binary() and ignored()'
189 bin
189 bin
190 $ hg add bin
190 $ hg add bin
191 $ fileset 'binary()'
191 $ fileset 'binary()'
192 bin
192 bin
193
193
194 $ fileset 'grep("b{1}")'
194 $ fileset 'grep("b{1}")'
195 b1
195 b2
196 b2
196 c1
197 c1
197 b1
198 $ fileset 'grep("missingparens(")'
198 $ fileset 'grep("missingparens(")'
199 hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
199 hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
200 [255]
200 [255]
201
201
202 #if execbit
202 #if execbit
203 $ chmod +x b2
203 $ chmod +x b2
204 $ fileset 'exec()'
204 $ fileset 'exec()'
205 b2
205 b2
206 #endif
206 #endif
207
207
208 #if symlink
208 #if symlink
209 $ ln -s b2 b2link
209 $ ln -s b2 b2link
210 $ fileset 'symlink() and unknown()'
210 $ fileset 'symlink() and unknown()'
211 b2link
211 b2link
212 $ hg add b2link
212 $ hg add b2link
213 #endif
213 #endif
214
214
215 #if no-windows
215 #if no-windows
216 $ echo foo > con.xml
216 $ echo foo > con.xml
217 $ fileset 'not portable()'
217 $ fileset 'not portable()'
218 con.xml
218 con.xml
219 $ hg --config ui.portablefilenames=ignore add con.xml
219 $ hg --config ui.portablefilenames=ignore add con.xml
220 #endif
220 #endif
221
221
222 >>> open('1k', 'wb').write(b' '*1024) and None
222 >>> open('1k', 'wb').write(b' '*1024) and None
223 >>> open('2k', 'wb').write(b' '*2048) and None
223 >>> open('2k', 'wb').write(b' '*2048) and None
224 $ hg add 1k 2k
224 $ hg add 1k 2k
225 $ fileset 'size("bar")'
225 $ fileset 'size("bar")'
226 hg: parse error: couldn't parse size: bar
226 hg: parse error: couldn't parse size: bar
227 [255]
227 [255]
228 $ fileset '(1k, 2k)'
228 $ fileset '(1k, 2k)'
229 hg: parse error: can't use a list in this context
229 hg: parse error: can't use a list in this context
230 (see hg help "filesets.x or y")
230 (see hg help "filesets.x or y")
231 [255]
231 [255]
232 $ fileset 'size(1k)'
232 $ fileset 'size(1k)'
233 1k
233 1k
234 $ fileset '(1k or 2k) and size("< 2k")'
234 $ fileset '(1k or 2k) and size("< 2k")'
235 1k
235 1k
236 $ fileset '(1k or 2k) and size("<=2k")'
236 $ fileset '(1k or 2k) and size("<=2k")'
237 1k
237 1k
238 2k
238 2k
239 $ fileset '(1k or 2k) and size("> 1k")'
239 $ fileset '(1k or 2k) and size("> 1k")'
240 2k
240 2k
241 $ fileset '(1k or 2k) and size(">=1K")'
241 $ fileset '(1k or 2k) and size(">=1K")'
242 1k
242 1k
243 2k
243 2k
244 $ fileset '(1k or 2k) and size(".5KB - 1.5kB")'
244 $ fileset '(1k or 2k) and size(".5KB - 1.5kB")'
245 1k
245 1k
246 $ fileset 'size("1M")'
246 $ fileset 'size("1M")'
247 $ fileset 'size("1 GB")'
247 $ fileset 'size("1 GB")'
248
248
249 Test merge states
249 Test merge states
250
250
251 $ hg ci -m manychanges
251 $ hg ci -m manychanges
252 $ hg file -r . 'set:copied() & modified()'
252 $ hg file -r . 'set:copied() & modified()'
253 [1]
253 [1]
254 $ hg up -C 0
254 $ hg up -C 0
255 * files updated, 0 files merged, * files removed, 0 files unresolved (glob)
255 * files updated, 0 files merged, * files removed, 0 files unresolved (glob)
256 $ echo c >> b2
256 $ echo c >> b2
257 $ hg ci -m diverging b2
257 $ hg ci -m diverging b2
258 created new head
258 created new head
259 $ fileset 'resolved()'
259 $ fileset 'resolved()'
260 $ fileset 'unresolved()'
260 $ fileset 'unresolved()'
261 $ hg merge
261 $ hg merge
262 merging b2
262 merging b2
263 warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
263 warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
264 * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
264 * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
265 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
265 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
266 [1]
266 [1]
267 $ fileset 'resolved()'
267 $ fileset 'resolved()'
268 $ fileset 'unresolved()'
268 $ fileset 'unresolved()'
269 b2
269 b2
270 $ echo e > b2
270 $ echo e > b2
271 $ hg resolve -m b2
271 $ hg resolve -m b2
272 (no more unresolved files)
272 (no more unresolved files)
273 $ fileset 'resolved()'
273 $ fileset 'resolved()'
274 b2
274 b2
275 $ fileset 'unresolved()'
275 $ fileset 'unresolved()'
276 $ hg ci -m merge
276 $ hg ci -m merge
277
277
278 Test subrepo predicate
278 Test subrepo predicate
279
279
280 $ hg init sub
280 $ hg init sub
281 $ echo a > sub/suba
281 $ echo a > sub/suba
282 $ hg -R sub add sub/suba
282 $ hg -R sub add sub/suba
283 $ hg -R sub ci -m sub
283 $ hg -R sub ci -m sub
284 $ echo 'sub = sub' > .hgsub
284 $ echo 'sub = sub' > .hgsub
285 $ hg init sub2
285 $ hg init sub2
286 $ echo b > sub2/b
286 $ echo b > sub2/b
287 $ hg -R sub2 ci -Am sub2
287 $ hg -R sub2 ci -Am sub2
288 adding b
288 adding b
289 $ echo 'sub2 = sub2' >> .hgsub
289 $ echo 'sub2 = sub2' >> .hgsub
290 $ fileset 'subrepo()'
290 $ fileset 'subrepo()'
291 $ hg add .hgsub
291 $ hg add .hgsub
292 $ fileset 'subrepo()'
292 $ fileset 'subrepo()'
293 sub
293 sub
294 sub2
294 sub2
295 $ fileset 'subrepo("sub")'
295 $ fileset 'subrepo("sub")'
296 sub
296 sub
297 $ fileset 'subrepo("glob:*")'
297 $ fileset 'subrepo("glob:*")'
298 sub
298 sub
299 sub2
299 sub2
300 $ hg ci -m subrepo
300 $ hg ci -m subrepo
301
301
302 Test that .hgsubstate is updated as appropriate during a conversion. The
302 Test that .hgsubstate is updated as appropriate during a conversion. The
303 saverev property is enough to alter the hashes of the subrepo.
303 saverev property is enough to alter the hashes of the subrepo.
304
304
305 $ hg init ../converted
305 $ hg init ../converted
306 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
306 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
307 > sub ../converted/sub
307 > sub ../converted/sub
308 initializing destination ../converted/sub repository
308 initializing destination ../converted/sub repository
309 scanning source...
309 scanning source...
310 sorting...
310 sorting...
311 converting...
311 converting...
312 0 sub
312 0 sub
313 $ hg clone -U sub2 ../converted/sub2
313 $ hg clone -U sub2 ../converted/sub2
314 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
314 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
315 > . ../converted
315 > . ../converted
316 scanning source...
316 scanning source...
317 sorting...
317 sorting...
318 converting...
318 converting...
319 4 addfiles
319 4 addfiles
320 3 manychanges
320 3 manychanges
321 2 diverging
321 2 diverging
322 1 merge
322 1 merge
323 0 subrepo
323 0 subrepo
324 no ".hgsubstate" updates will be made for "sub2"
324 no ".hgsubstate" updates will be made for "sub2"
325 $ hg up -q -R ../converted -r tip
325 $ hg up -q -R ../converted -r tip
326 $ hg --cwd ../converted cat sub/suba sub2/b -r tip
326 $ hg --cwd ../converted cat sub/suba sub2/b -r tip
327 a
327 a
328 b
328 b
329 $ oldnode=`hg log -r tip -T "{node}\n"`
329 $ oldnode=`hg log -r tip -T "{node}\n"`
330 $ newnode=`hg log -R ../converted -r tip -T "{node}\n"`
330 $ newnode=`hg log -R ../converted -r tip -T "{node}\n"`
331 $ [ "$oldnode" != "$newnode" ] || echo "nothing changed"
331 $ [ "$oldnode" != "$newnode" ] || echo "nothing changed"
332
332
333 Test with a revision
333 Test with a revision
334
334
335 $ hg log -G --template '{rev} {desc}\n'
335 $ hg log -G --template '{rev} {desc}\n'
336 @ 4 subrepo
336 @ 4 subrepo
337 |
337 |
338 o 3 merge
338 o 3 merge
339 |\
339 |\
340 | o 2 diverging
340 | o 2 diverging
341 | |
341 | |
342 o | 1 manychanges
342 o | 1 manychanges
343 |/
343 |/
344 o 0 addfiles
344 o 0 addfiles
345
345
346 $ echo unknown > unknown
346 $ echo unknown > unknown
347 $ fileset -r1 'modified()'
347 $ fileset -r1 'modified()'
348 b2
348 b2
349 $ fileset -r1 'added() and c1'
349 $ fileset -r1 'added() and c1'
350 c1
350 c1
351 $ fileset -r1 'removed()'
351 $ fileset -r1 'removed()'
352 a2
352 a2
353 $ fileset -r1 'deleted()'
353 $ fileset -r1 'deleted()'
354 $ fileset -r1 'unknown()'
354 $ fileset -r1 'unknown()'
355 $ fileset -r1 'ignored()'
355 $ fileset -r1 'ignored()'
356 $ fileset -r1 'hgignore()'
356 $ fileset -r1 'hgignore()'
357 b2
357 b2
358 bin
358 bin
359 $ fileset -r1 'binary()'
359 $ fileset -r1 'binary()'
360 bin
360 bin
361 $ fileset -r1 'size(1k)'
361 $ fileset -r1 'size(1k)'
362 1k
362 1k
363 $ fileset -r3 'resolved()'
363 $ fileset -r3 'resolved()'
364 $ fileset -r3 'unresolved()'
364 $ fileset -r3 'unresolved()'
365
365
366 #if execbit
366 #if execbit
367 $ fileset -r1 'exec()'
367 $ fileset -r1 'exec()'
368 b2
368 b2
369 #endif
369 #endif
370
370
371 #if symlink
371 #if symlink
372 $ fileset -r1 'symlink()'
372 $ fileset -r1 'symlink()'
373 b2link
373 b2link
374 #endif
374 #endif
375
375
376 #if no-windows
376 #if no-windows
377 $ fileset -r1 'not portable()'
377 $ fileset -r1 'not portable()'
378 con.xml
378 con.xml
379 $ hg forget 'con.xml'
379 $ hg forget 'con.xml'
380 #endif
380 #endif
381
381
382 $ fileset -r4 'subrepo("re:su.*")'
382 $ fileset -r4 'subrepo("re:su.*")'
383 sub
383 sub
384 sub2
384 sub2
385 $ fileset -r4 'subrepo(re:su.*)'
385 $ fileset -r4 'subrepo(re:su.*)'
386 sub
386 sub
387 sub2
387 sub2
388 $ fileset -r4 'subrepo("sub")'
388 $ fileset -r4 'subrepo("sub")'
389 sub
389 sub
390 $ fileset -r4 'b2 or c1'
390 $ fileset -r4 'b2 or c1'
391 b2
391 b2
392 c1
392 c1
393
393
394 >>> open('dos', 'wb').write(b"dos\r\n") and None
394 >>> open('dos', 'wb').write(b"dos\r\n") and None
395 >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
395 >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
396 >>> open('mac', 'wb').write(b"mac\r") and None
396 >>> open('mac', 'wb').write(b"mac\r") and None
397 $ hg add dos mixed mac
397 $ hg add dos mixed mac
398
398
399 (remove a1, to examine safety of 'eol' on removed files)
399 (remove a1, to examine safety of 'eol' on removed files)
400 $ rm a1
400 $ rm a1
401
401
402 $ fileset 'eol(dos)'
402 $ fileset 'eol(dos)'
403 dos
403 dos
404 mixed
404 mixed
405 $ fileset 'eol(unix)'
405 $ fileset 'eol(unix)'
406 mixed
407 .hgsub
406 .hgsub
408 .hgsubstate
407 .hgsubstate
409 b1
408 b1
410 b2
409 b2
411 c1
410 c1
411 mixed
412 $ fileset 'eol(mac)'
412 $ fileset 'eol(mac)'
413 mac
413 mac
414
414
415 Test safety of 'encoding' on removed files
415 Test safety of 'encoding' on removed files
416
416
417 $ fileset 'encoding("ascii")'
417 $ fileset 'encoding("ascii")'
418 dos
419 mac
420 mixed
421 .hgsub
418 .hgsub
422 .hgsubstate
419 .hgsubstate
423 1k
420 1k
424 2k
421 2k
425 b1
422 b1
426 b2
423 b2
427 b2link (symlink !)
424 b2link (symlink !)
428 bin
425 bin
429 c1
426 c1
427 dos
428 mac
429 mixed
430
430
431 Test detection of unintentional 'matchctx.existing()' invocation
431 Test detection of unintentional 'matchctx.existing()' invocation
432
432
433 $ cat > $TESTTMP/existingcaller.py <<EOF
433 $ cat > $TESTTMP/existingcaller.py <<EOF
434 > from mercurial import registrar
434 > from mercurial import registrar
435 >
435 >
436 > filesetpredicate = registrar.filesetpredicate()
436 > filesetpredicate = registrar.filesetpredicate()
437 > @filesetpredicate(b'existingcaller()', callexisting=False)
437 > @filesetpredicate(b'existingcaller()', callexisting=False)
438 > def existingcaller(mctx, x):
438 > def existingcaller(mctx, x):
439 > # this 'mctx.existing()' invocation is unintentional
439 > # this 'mctx.existing()' invocation is unintentional
440 > return [f for f in mctx.existing()]
440 > return [f for f in mctx.existing()]
441 > EOF
441 > EOF
442
442
443 $ cat >> .hg/hgrc <<EOF
443 $ cat >> .hg/hgrc <<EOF
444 > [extensions]
444 > [extensions]
445 > existingcaller = $TESTTMP/existingcaller.py
445 > existingcaller = $TESTTMP/existingcaller.py
446 > EOF
446 > EOF
447
447
448 $ fileset 'existingcaller()' 2>&1 | tail -1
448 $ fileset 'existingcaller()' 2>&1 | tail -1
449 *ProgrammingError: *unexpected existing() invocation* (glob)
449 *ProgrammingError: *unexpected existing() invocation* (glob)
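The ProgrammingError above is the guard being tested: a predicate registered with callexisting=False must not call mctx.existing(). For contrast, a minimal sketch of what a predicate that legitimately needs the existing files would presumably look like, mirroring the extension above but declaring that need up front (hypothetical extension, not part of this test):

  $ cat > $TESTTMP/okcaller.py <<EOF
  > from mercurial import registrar
  >
  > filesetpredicate = registrar.filesetpredicate()
  > # callexisting=True declares that this predicate inspects the set of
  > # existing files, so the mctx.existing() call below is expected
  > @filesetpredicate(b'okcaller()', callexisting=True)
  > def okcaller(mctx, x):
  >     return [f for f in mctx.existing()]
  > EOF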
450
450
451 Test 'revs(...)'
451 Test 'revs(...)'
452 ================
452 ================
453
453
454 small reminder of the repository state
454 small reminder of the repository state
455
455
456 $ hg log -G
456 $ hg log -G
457 @ changeset: 4:* (glob)
457 @ changeset: 4:* (glob)
458 | tag: tip
458 | tag: tip
459 | user: test
459 | user: test
460 | date: Thu Jan 01 00:00:00 1970 +0000
460 | date: Thu Jan 01 00:00:00 1970 +0000
461 | summary: subrepo
461 | summary: subrepo
462 |
462 |
463 o changeset: 3:* (glob)
463 o changeset: 3:* (glob)
464 |\ parent: 2:55b05bdebf36
464 |\ parent: 2:55b05bdebf36
465 | | parent: 1:* (glob)
465 | | parent: 1:* (glob)
466 | | user: test
466 | | user: test
467 | | date: Thu Jan 01 00:00:00 1970 +0000
467 | | date: Thu Jan 01 00:00:00 1970 +0000
468 | | summary: merge
468 | | summary: merge
469 | |
469 | |
470 | o changeset: 2:55b05bdebf36
470 | o changeset: 2:55b05bdebf36
471 | | parent: 0:8a9576c51c1f
471 | | parent: 0:8a9576c51c1f
472 | | user: test
472 | | user: test
473 | | date: Thu Jan 01 00:00:00 1970 +0000
473 | | date: Thu Jan 01 00:00:00 1970 +0000
474 | | summary: diverging
474 | | summary: diverging
475 | |
475 | |
476 o | changeset: 1:* (glob)
476 o | changeset: 1:* (glob)
477 |/ user: test
477 |/ user: test
478 | date: Thu Jan 01 00:00:00 1970 +0000
478 | date: Thu Jan 01 00:00:00 1970 +0000
479 | summary: manychanges
479 | summary: manychanges
480 |
480 |
481 o changeset: 0:8a9576c51c1f
481 o changeset: 0:8a9576c51c1f
482 user: test
482 user: test
483 date: Thu Jan 01 00:00:00 1970 +0000
483 date: Thu Jan 01 00:00:00 1970 +0000
484 summary: addfiles
484 summary: addfiles
485
485
486 $ hg status --change 0
486 $ hg status --change 0
487 A a1
487 A a1
488 A a2
488 A a2
489 A b1
489 A b1
490 A b2
490 A b2
491 $ hg status --change 1
491 $ hg status --change 1
492 M b2
492 M b2
493 A 1k
493 A 1k
494 A 2k
494 A 2k
495 A b2link (no-windows !)
495 A b2link (no-windows !)
496 A bin
496 A bin
497 A c1
497 A c1
498 A con.xml (no-windows !)
498 A con.xml (no-windows !)
499 R a2
499 R a2
500 $ hg status --change 2
500 $ hg status --change 2
501 M b2
501 M b2
502 $ hg status --change 3
502 $ hg status --change 3
503 M b2
503 M b2
504 A 1k
504 A 1k
505 A 2k
505 A 2k
506 A b2link (no-windows !)
506 A b2link (no-windows !)
507 A bin
507 A bin
508 A c1
508 A c1
509 A con.xml (no-windows !)
509 A con.xml (no-windows !)
510 R a2
510 R a2
511 $ hg status --change 4
511 $ hg status --change 4
512 A .hgsub
512 A .hgsub
513 A .hgsubstate
513 A .hgsubstate
514 $ hg status
514 $ hg status
515 A dos
515 A dos
516 A mac
516 A mac
517 A mixed
517 A mixed
518 R con.xml (no-windows !)
518 R con.xml (no-windows !)
519 ! a1
519 ! a1
520 ? b2.orig
520 ? b2.orig
521 ? c3
521 ? c3
522 ? unknown
522 ? unknown
523
523
524 Test that files at -r0 are filtered by files at wdir
524 Test that files at -r0 are filtered by files at wdir
525 ----------------------------------------------------
525 ----------------------------------------------------
526
526
527 $ fileset -r0 '* and revs("wdir()", *)'
527 $ fileset -r0 '* and revs("wdir()", *)'
528 a1
528 a1
529 b1
529 b1
530 b2
530 b2
531
531
532 Test that "revs()" work at all
532 Test that "revs()" work at all
533 ------------------------------
533 ------------------------------
534
534
535 $ fileset "revs('2', modified())"
535 $ fileset "revs('2', modified())"
536 b2
536 b2
537
537
538 Test that "revs()" work for file missing in the working copy/current context
538 Test that "revs()" work for file missing in the working copy/current context
539 ----------------------------------------------------------------------------
539 ----------------------------------------------------------------------------
540
540
541 (a2 not in working copy)
541 (a2 not in working copy)
542
542
543 $ fileset "revs('0', added())"
543 $ fileset "revs('0', added())"
544 a1
544 a1
545 a2
545 a2
546 b1
546 b1
547 b2
547 b2
548
548
549 (none of the files exist in "0")
549 (none of the files exist in "0")
550
550
551 $ fileset -r 0 "revs('4', added())"
551 $ fileset -r 0 "revs('4', added())"
552 .hgsub
552 .hgsub
553 .hgsubstate
553 .hgsubstate
554
554
555 Call with empty revset
555 Call with empty revset
556 --------------------------
556 --------------------------
557
557
558 $ fileset "revs('2-2', modified())"
558 $ fileset "revs('2-2', modified())"
559
559
560 Call with revset matching multiple revs
560 Call with revset matching multiple revs
561 ---------------------------------------
561 ---------------------------------------
562
562
563 $ fileset "revs('0+4', added())"
563 $ fileset "revs('0+4', added())"
564 .hgsub
565 .hgsubstate
564 a1
566 a1
565 a2
567 a2
566 b1
568 b1
567 b2
569 b2
568 .hgsub
569 .hgsubstate
570
570
571 overlapping set
571 overlapping set
572
572
573 $ fileset "revs('1+2', modified())"
573 $ fileset "revs('1+2', modified())"
574 b2
574 b2
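revs(revset, fileset) evaluates the inner fileset in every revision selected by the revset and takes the union, which is why the overlapping '1+2' set above still lists b2 only once. A rough command-line counterpart (assumed invocation, not part of this test):

  $ hg files "set:revs('1+2', modified())"

As with the '* and status(...)' cases below, matches for files that are not present in the command's own context will not be listed.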
575
575
576 Test 'status(...)'
576 Test 'status(...)'
577 ==================
577 ==================
578
578
579 Simple case
579 Simple case
580 -----------
580 -----------
581
581
582 $ fileset "status(3, 4, added())"
582 $ fileset "status(3, 4, added())"
583 .hgsub
583 .hgsub
584 .hgsubstate
584 .hgsubstate
585
585
586 use rev to restrict matched files
586 use rev to restrict matched files
587 ---------------------------------
587 ---------------------------------
588
588
589 $ hg status --removed --rev 0 --rev 1
589 $ hg status --removed --rev 0 --rev 1
590 R a2
590 R a2
591 $ fileset "status(0, 1, removed())"
591 $ fileset "status(0, 1, removed())"
592 a2
592 a2
593 $ fileset "* and status(0, 1, removed())"
593 $ fileset "* and status(0, 1, removed())"
594 $ fileset -r 4 "status(0, 1, removed())"
594 $ fileset -r 4 "status(0, 1, removed())"
595 a2
595 a2
596 $ fileset -r 4 "* and status(0, 1, removed())"
596 $ fileset -r 4 "* and status(0, 1, removed())"
597 $ fileset "revs('4', * and status(0, 1, removed()))"
597 $ fileset "revs('4', * and status(0, 1, removed()))"
598 $ fileset "revs('0', * and status(0, 1, removed()))"
598 $ fileset "revs('0', * and status(0, 1, removed()))"
599 a2
599 a2
600
600
601 check wdir()
601 check wdir()
602 ------------
602 ------------
603
603
604 $ hg status --removed --rev 4
604 $ hg status --removed --rev 4
605 R con.xml (no-windows !)
605 R con.xml (no-windows !)
606 $ fileset "status(4, 'wdir()', removed())"
606 $ fileset "status(4, 'wdir()', removed())"
607 con.xml (no-windows !)
607 con.xml (no-windows !)
608
608
609 $ hg status --removed --rev 2
609 $ hg status --removed --rev 2
610 R a2
610 R a2
611 $ fileset "status('2', 'wdir()', removed())"
611 $ fileset "status('2', 'wdir()', removed())"
612 a2
612 a2
613
613
614 test backward status
614 test backward status
615 --------------------
615 --------------------
616
616
617 $ hg status --removed --rev 0 --rev 4
617 $ hg status --removed --rev 0 --rev 4
618 R a2
618 R a2
619 $ hg status --added --rev 4 --rev 0
619 $ hg status --added --rev 4 --rev 0
620 A a2
620 A a2
621 $ fileset "status(4, 0, added())"
621 $ fileset "status(4, 0, added())"
622 a2
622 a2
623
623
624 test cross branch status
624 test cross branch status
625 ------------------------
625 ------------------------
626
626
627 $ hg status --added --rev 1 --rev 2
627 $ hg status --added --rev 1 --rev 2
628 A a2
628 A a2
629 $ fileset "status(1, 2, added())"
629 $ fileset "status(1, 2, added())"
630 a2
630 a2
631
631
632 test with multi-rev revsets
632 test with multi-rev revsets
633 ---------------------------
633 ---------------------------
634 $ hg status --added --rev 0:1 --rev 3:4
634 $ hg status --added --rev 0:1 --rev 3:4
635 A .hgsub
635 A .hgsub
636 A .hgsubstate
636 A .hgsubstate
637 A 1k
637 A 1k
638 A 2k
638 A 2k
639 A b2link (no-windows !)
639 A b2link (no-windows !)
640 A bin
640 A bin
641 A c1
641 A c1
642 A con.xml (no-windows !)
642 A con.xml (no-windows !)
643 $ fileset "status('0:1', '3:4', added())"
643 $ fileset "status('0:1', '3:4', added())"
644 .hgsub
644 .hgsub
645 .hgsubstate
645 .hgsubstate
646 1k
646 1k
647 2k
647 2k
648 b2link (no-windows !)
648 b2link (no-windows !)
649 bin
649 bin
650 c1
650 c1
651 con.xml (no-windows !)
651 con.xml (no-windows !)
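status(rev1, rev2, fileset) evaluates the inner fileset against the status computed between the two revisions (or revsets), matching the paired 'hg status --rev' invocations shown above. A rough everyday example, listing the files modified or added by the working copy's parent commit (assumed invocation, not part of this test):

  $ hg files "set:status('.^', '.', modified() or added())"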
652
652
653 tests with empty values
653 tests with empty values
654 -----------------------
654 -----------------------
655
655
656 Fully empty revset
656 Fully empty revset
657
657
658 $ fileset "status('', '4', added())"
658 $ fileset "status('', '4', added())"
659 hg: parse error: first argument to status must be a revision
659 hg: parse error: first argument to status must be a revision
660 [255]
660 [255]
661 $ fileset "status('2', '', added())"
661 $ fileset "status('2', '', added())"
662 hg: parse error: second argument to status must be a revision
662 hg: parse error: second argument to status must be a revision
663 [255]
663 [255]
664
664
665 A whitespace-only revset errors at the revset layer
665 A whitespace-only revset errors at the revset layer
666
666
667 $ fileset "status(' ', '4', added())"
667 $ fileset "status(' ', '4', added())"
668 hg: parse error at 1: not a prefix: end
668 hg: parse error at 1: not a prefix: end
669 (
669 (
670 ^ here)
670 ^ here)
671 [255]
671 [255]
672 $ fileset "status('2', ' ', added())"
672 $ fileset "status('2', ' ', added())"
673 hg: parse error at 1: not a prefix: end
673 hg: parse error at 1: not a prefix: end
674 (
674 (
675 ^ here)
675 ^ here)
676 [255]
676 [255]