##// END OF EJS Templates
fileset: add stub for weight-based optimization...
Yuya Nishihara -
r38865:7e7e2b2f default
parent child Browse files
Show More
@@ -1,3252 +1,3253
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from .thirdparty import (
35 from .thirdparty import (
36 cbor,
36 cbor,
37 )
37 )
38 from . import (
38 from . import (
39 bundle2,
39 bundle2,
40 changegroup,
40 changegroup,
41 cmdutil,
41 cmdutil,
42 color,
42 color,
43 context,
43 context,
44 dagparser,
44 dagparser,
45 dagutil,
45 dagutil,
46 encoding,
46 encoding,
47 error,
47 error,
48 exchange,
48 exchange,
49 extensions,
49 extensions,
50 filemerge,
50 filemerge,
51 filesetlang,
51 filesetlang,
52 formatter,
52 formatter,
53 hg,
53 hg,
54 httppeer,
54 httppeer,
55 localrepo,
55 localrepo,
56 lock as lockmod,
56 lock as lockmod,
57 logcmdutil,
57 logcmdutil,
58 merge as mergemod,
58 merge as mergemod,
59 obsolete,
59 obsolete,
60 obsutil,
60 obsutil,
61 phases,
61 phases,
62 policy,
62 policy,
63 pvec,
63 pvec,
64 pycompat,
64 pycompat,
65 registrar,
65 registrar,
66 repair,
66 repair,
67 revlog,
67 revlog,
68 revset,
68 revset,
69 revsetlang,
69 revsetlang,
70 scmutil,
70 scmutil,
71 setdiscovery,
71 setdiscovery,
72 simplemerge,
72 simplemerge,
73 sshpeer,
73 sshpeer,
74 sslutil,
74 sslutil,
75 streamclone,
75 streamclone,
76 templater,
76 templater,
77 treediscovery,
77 treediscovery,
78 upgrade,
78 upgrade,
79 url as urlmod,
79 url as urlmod,
80 util,
80 util,
81 vfs as vfsmod,
81 vfs as vfsmod,
82 wireprotoframing,
82 wireprotoframing,
83 wireprotoserver,
83 wireprotoserver,
84 wireprotov2peer,
84 wireprotov2peer,
85 )
85 )
86 from .utils import (
86 from .utils import (
87 dateutil,
87 dateutil,
88 procutil,
88 procutil,
89 stringutil,
89 stringutil,
90 )
90 )
91
91
# Convenience alias: release a sequence of locks (used throughout this module).
release = lockmod.release

# Decorator used to register every debug* command defined in this file.
command = registrar.command()
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # An explicit revlog index file was given: open it from the cwd,
        # bypassing the path auditor since this is a debug command.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        resolve = rlog.lookup
    elif nargs == 2:
        # No index file: fall back to the changelog of the local repository.
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        resolve = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    ancestornode = rlog.ancestor(resolve(rev1), resolve(rev2))
    ui.write('%d:%s\n' % (rlog.rev(ancestornode), hex(ancestornode)))
114
114
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the bundle file, detect its type, then replay it into the repo.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
121
121
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense when starting from a pristine repo.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # First pass over the DAG text: count 'n' (node) events so the progress
    # bar has a total and the mergeable file can be sized up front.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # Second pass: actually commit each node. All commits happen inside a
    # single transaction under both repo locks.
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                 # rev number of the most recently created node
        atbranch = 'default'    # branch applied to subsequently created nodes
        nodeids = []            # node id per rev, indexed by rev number
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # Node event: data is (rev-number, list-of-parent-revs).
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Maintain a single file "mf" whose per-rev edits are
                    # line-disjoint, so merges of it succeed cleanly.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the file contents.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        # Very first node: start from the pre-built template.
                        ml = initialmergedlines
                    # Touch only this rev's dedicated line.
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # File "of" is fully rewritten at every rev.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # A brand-new file "nf<rev>" appears at every rev; on
                    # merges, carry over the second parent's nf* files too.
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: materialize file contents for this rev.
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Translate DAG parent rev numbers into node ids.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # Label event: record a local tag for rev `id`.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # Annotation event: switch the branch for following nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))
269
269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup `gen`.

    With `all` set, every delta of every section (changelog, manifest, each
    filelog) is listed with its full metadata; otherwise only changelog node
    ids are printed. `indent` prefixes each output line (used when nested
    inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print the header `named`, then one line per delta. NOTE: this
            # consumes gen.deltaiter(), so sections must be read in stream
            # order (changelog, manifest, then filelogs).
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # Headers must be read to advance the stream even though their
        # contents are unused here.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # Filelog sections repeat until an empty header ({}) is returned.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298
298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    blob = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(blob)
    except error.UnknownVersion as exc:
        # Marker format we do not understand: report and bail out.
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(blob)))
    else:
        ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(blob)))
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            fm.startitem()
            fm.plain(pad)
            cmdutil.showmarker(fm, obsutil.marker(None, rawmarker))
        fm.end()
321
321
def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    # One output line per head, grouped by phase in canonical phase order.
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))
330
330
def _quasirepr(thing):
    """Return a stable, human-readable repr.

    Mappings are rendered with sorted keys so output is deterministic;
    everything else falls through to the plain repr.
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
    return '{%s}' % b', '.join(pairs)
336
336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # Honor --part-type filtering, if any was requested.
        if parttypes and part.type not in parttypes:
            continue
        ui.write(('%s -- %s (mandatory: %r)\n'
                  % (part.type, _quasirepr(part.params), part.mandatory)))
        if ui.quiet:
            continue
        # Recurse into the payloads we know how to decode.
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
359
359
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: report the bundlespec only; skip parsing the payload.
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        # Dispatch on the detected bundle format.
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
378
378
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Wire-protocol capabilities first...
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # ...then the decoded bundle2 capability tree, if advertised.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for value in values:
                ui.write((' %s\n') % value)
397
397
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Check every tracked file's dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Reverse direction: every manifest1 file must have a sane dirstate entry.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Use a name other than 'error' for the message: a local named
        # 'error' would shadow the error module and make the raise below
        # fail with AttributeError instead of aborting cleanly.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
425
425
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; default lists raw colors/effects.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
436
436
def _debugdisplaycolor(ui):
    """List every known color/effect label, each rendered in its own style.

    Works on a copy of the ui so the real style table is left untouched.
    """
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also pick up user-defined color.* / terminfo.* keys.
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    ordered = sorted(ui._styles.items(),
                     key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in ordered:
        ui.write(('%s\n') % colorname, label=label)
454
454
def _debugdisplaystyle(ui):
    """List each configured style label with its effects, rendered live."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Pad labels to the widest one so the effect columns line up.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            padding = max(0, width - len(label))
            ui.write(' ' * padding)
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
468
468
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
486
486
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # An explicit revlog index file was given: describe that revlog.
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, [parents])) for every revision, plus an
            # ('l', (rev, "rN")) label for each rev listed on the command
            # line so it can be identified in the output.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No index file: describe the changelog of the current repository.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Map rev -> list of tag names, for emitting label events below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # Yield branch-change annotations ('a'), node events ('n'), and
            # optional tag labels ('l') in changelog order.
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    # Render the event stream as dagtext lines honoring the display options.
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
549
549
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # with -c/-m/--dir the single positional argument is the revision,
    # not a file path, so shuffle the arguments accordingly
    if any(opts.get(flag) for flag in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        rev = file_
        file_ = None
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rlog = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(rlog.revision(rlog.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
565
565
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # -e/--extended additionally tries the less common date formats
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
581
581
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                    (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # index entry fields used here: e[1] = compressed size,
        # e[2] = uncompressed size, e[3] = delta base rev,
        # e[5]/e[6] = parent revs
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # with generaldelta the base can be any rev, so classify it
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # without generaldelta a delta is always against the previous
            # rev, unless the rev is its own base (a full snapshot)
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # chain ids are assigned in order of first appearance of each base rev
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # chain of length one: no previous revision
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # simulate the sparse read: sum the on-disk blocks that would
            # be fetched to reconstruct this chain
            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
733
733
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    # each dirstate entry is indexed as: ent[0] = state char, ent[1] = mode,
    # ent[2] = size, ent[3] = mtime (-1 meaning "unset")
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            # padded to the width of the strftime output below so the
            # columns stay aligned
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # S_IFLNK bit set: show as a symlink instead of octal perms
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
765
765
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
     ('', 'nonheads', None,
      _('use old-style discovery with non-heads included')),
     # fix: this help string was the only one not wrapped in _() and was
     # therefore excluded from translation
     ('', 'rev', [], _('restrict discovery to this set of revs')),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                # renamed from 'all' to avoid shadowing the builtin
                allrevs = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(allrevs))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # renamed from 'any' to avoid shadowing the builtin; the flag
            # itself is unused here
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
817
817
# read/write buffer size (4 KiB) used when streaming downloads below
_chunksize = 4 << 10
819
819
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)
    try:
        dest = ui
        if output:
            dest = open(output, "wb", _chunksize)
        try:
            # stream in fixed-size chunks so arbitrarily large resources
            # never have to fit in memory at once
            data = fh.read(_chunksize)
            while data:
                dest.write(data)
                data = fh.read(_chunksize)
        finally:
            if output:
                dest.close()
    finally:
        # fix: the original never closed the URL handle, leaking the
        # underlying connection/file descriptor
        fh.close()
841
841
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # in quiet/verbose mode the name stands alone on its line
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # annotate the name with compatibility info against this
            # Mercurial version
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        # always expose the flag to templated output, even when not verbose
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
887
887
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # transformation pipeline applied to the parsed tree; each stage name
    # can be selected with -p/--show-stage to dump the intermediate tree
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = set(n for n, f in stages)

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for n, f in stages:
        tree = f(tree)
        if n in showalways:
            # the stage header is omitted for the legacy --verbose display
            # of the parsed tree only
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for r in repo:
            c = repo[r]
            files.update(c.files())
            files.update(c.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    # -s defaults to None, so plain --verbose also triggers the dump
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if not m(f):
            continue
        ui.write("%s\n" % f)
951
952
@command('debugformat',
    [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # width of the name column: widest variant name, but at least as wide
    # as the "format-variant" header
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # '%s:' padded so all value columns line up
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through; other values render as yes/no
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # structured output (json/template) keeps native values
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # pick labels so the UI can color mismatches between the repo's
        # actual format, the configured value, and Mercurial's default
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1013
1014
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    # Probe the filesystem backing `path` and report one capability per line.
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % ('yes' if util.checkexec(path) else 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % ('yes' if util.checklink(path) else 'no'))
    ui.write(('hardlink: %s\n') % ('yes' if util.checknlink(path) else 'no'))
    # Case sensitivity requires creating a real probe file; if the directory
    # is not writable (OSError) we simply report '(unknown)'.
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as probe:
            casesensitive = 'yes' if util.fscasesensitive(probe.name) else 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1030
1031
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    # Assemble keyword arguments for the wire-protocol getbundle call.
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **kwargs)

    # Map the user-facing compression name onto the internal bundle type.
    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1065
1066
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No arguments: just dump the combined ignore matcher.
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise check whether any containing directory matches.
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1107
1108
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # Full hashes in debug mode, abbreviated ones otherwise.
    shortfn = hex if ui.debugflag else short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # Print the column header matching the chosen format/verbosity.
    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag   size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Broken entries still get a row, with null parents.
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1172
1173
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    # Emit one edge per parent link; skip the null second parent.
    for rev in r:
        parents = r.parents(r.node(rev))
        ui.write("\t%d -> %d\n" % (r.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(parents[1]), rev))
    ui.write("}\n")
1187
1188
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # Dump `contents` into a fresh temp file and return its name.
        (fd, name) = pycompat.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, locally available, and wire-capable
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'available' if util._re2 else 'missing'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1361
1362
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One character per queried id: "1" for known, "0" for unknown.
    ui.write("%s\n" % "".join(["1" if f else "0" for f in flags]))
1375
1376
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias kept only for old completion scripts; delegates everything.
    debugnamecomplete(ui, repo, *args)
1380
1381
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal mode: unlink the requested lock file(s) and stop.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blockingly and hold them
    # until the user answers the prompt (or the process is interrupted).
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode: describe each lock's holder, or report it free.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1477
1478
@command('debugmanifestfulltextcache', [
    ('', 'clear', False, _('clear the cache')),
    ('a', 'add', '', _('add the given manifest node to the cache'),
     _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=None, **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""
    # Take the repo lock so the on-disk cache cannot change underneath us
    # while we inspect or mutate it.
    with repo.lock():
        r = repo.manifestlog._revlog
        try:
            cache = r._fulltextcache
        except AttributeError:
            # Alternate revlog implementations may not carry the cache;
            # warn and bail out rather than crash.
            ui.warn(_(
                "Current revlog implementation doesn't appear to have a "
                'manifest fulltext cache\n'))
            return

        if opts.get(r'clear'):
            cache.clear()

        if add:
            try:
                manifest = repo.manifestlog[r.lookup(add)]
            except error.LookupError as e:
                raise error.Abort(e, hint="Check your manifest node id")
            manifest.read()  # stores revision in cache too

        # Report the cache contents (most- to least-recently used).
        if not len(cache):
            ui.write(_('Cache empty'))
        else:
            ui.write(
                _('Cache contains %d manifest entries, in order of most to '
                  'least recent:\n') % (len(cache),))
            totalsize = 0
            for nodeid in cache:
                # Use cache.get to not update the LRU order
                data = cache.get(nodeid)
                size = len(data)
                totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
                ui.write(_('id: %s, size %s\n') % (
                    hex(nodeid), util.bytecount(size)))
            ondisk = cache._opener.stat('manifestfulltextcache').st_size
            ui.write(
                _('Total cache data size %s, on-disk %s\n') % (
                    util.bytecount(totalsize), util.bytecount(ondisk))
            )
1524
1525
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null' for readability.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump the raw records of the requested merge-state format version,
        # pretty-printing the record types we know about.
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # File/driver-resolved/change-delete records share a layout;
                # v2 additionally stores the "other" node before the flags.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write((' ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write((' other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # File-extras record: filename followed by key/value pairs.
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # Labels record: local, other, and optionally base.
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write((' local: %s\n' % labels[0]))
                ui.write((' other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write((' base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types sort first in 'order'; unknown ones sort after,
        # keyed by their payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1623
1624
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # Branch names are handled separately below so that, as before, only
    # open branches are offered as completions.
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    candidates.update(
        branch for (branch, heads, tip, closed)
        in repo.branchmap().iterbranches() if not closed)

    # With no arguments, complete against the empty prefix (i.e. list all).
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1643
1644
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id; aborts on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove markers by index and return early.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting 'precursor' in favor of
        # the given successors, inside a lock + transaction.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # Display mode: list markers, optionally limited to those relevant
        # to the revisions selected with --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1760
1761
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Turn 'path' (relative to cwd) into a repo-root-relative prefix and
        # return the (files, dirs) from the dirstate matching that prefix
        # whose state code is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # Outside the repository: nothing to complete.
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # On platforms where the OS separator is not '/', translate the
        # prefix to dirstate form ('/') before matching.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path segment boundary.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate state codes from the flags;
    # default to all of them ('nmar') when no filter flag was given.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1825
1826
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Turn on peer request logging unconditionally; the output only shows
    # up when --debug is in effect.
    cfg = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(cfg):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        def yesno(flag):
            return _('yes') if flag else _('no')

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % yesno(islocal))
        ui.write(_('pushable: %s\n') % yesno(pushable))
1844
1845
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, surface the inputs (env var / config) that can decide
        # the tool before pattern matching even happens.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Without --debug, swallow _picktool's warning chatter by
                # buffering ui output for the duration of the call.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1923
1924
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for name, value in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(name),
                                   stringutil.escapestr(value)))
        return

    # Update mode: attempt the old -> new transition for the given key.
    key, old, new = keyinfo
    with target.commandexecutor() as executor:
        result = executor.callcommand('pushkey', {
            'namespace': namespace,
            'key': key,
            'old': old,
            'new': new,
        }).result()

    ui.status(pycompat.bytestr(result) + '\n')
    return not result
1951
1952
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the parent vectors ("pvecs") of two revisions and print
    # their depths, delta, hamming distance, distance and relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)  # b=None resolves via revsingle's default
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # Classify how the two vectors relate via pvec's rich comparisons.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    # NOTE(review): if none of the four comparisons above holds, 'rel' is
    # unbound and the last ui.write below raises NameError -- presumably
    # ==/>/</| are exhaustive for pvecs; confirm against the pvec module.
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1972
1973
@command('debugrebuilddirstate|debugrebuildstate',
    [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
     ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                             'the working copy parent')),
    ],
    _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    # Hold the working-copy lock for the whole rebuild so no other process
    # mutates the dirstate while we recompute it.
    with repo.wlock():
        dirstate = repo.dirstate
        # None means "no restriction": rebuild entries for every file.
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files present in the manifest but missing from the dirstate.
            manifestonly = manifestfiles - dirstatefiles
            # Files tracked by the dirstate but absent from the manifest...
            dsonly = dirstatefiles - manifestfiles
            # ...excluding ones explicitly marked as added ('a'), which are
            # expected to be absent from the parent manifest.
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2010
2011
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: all of the work is delegated to repair.rebuildfncache().
    repair.rebuildfncache(ui, repo)
2015
2016
@command('debugrename',
    [('r', 'rev', '', _('revision to debug'), _('REV'))],
    _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    # Walk every matched file in the selected revision and report where
    # (if anywhere) its filelog says it was renamed from.
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        relpath = matcher.rel(path)
        copysource = fctx.filelog().renamed(fctx.filenode())
        if not copysource:
            ui.write(_("%s not renamed\n") % relpath)
        else:
            # copysource is a (source path, source filenode) pair.
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, copysource[0], hex(copysource[1])))
2033
2034
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # --dump mode: emit one raw table row per revision and exit early.
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start   end deltastart base   p1   p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0          # running total of raw (uncompressed) sizes
        heads = set()   # revisions not yet seen as a parent, i.e. heads so far

        for rev in pycompat.xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # Full revision: treat it as its own delta base.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Parents of this rev are no longer heads; this rev now is.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of raw data so far to stored bytes so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Statistics mode: decode version/flags, then scan all revisions.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters: merges, full snapshots, and delta-base classification
    # (against previous rev, p1, p2, or some other revision).
    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each size accumulator is [min, max, total]; min starts as None so the
    # first sample always initializes it (see addsize).
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator in place.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in pycompat.xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            # Raw sizes are only meaningful for revlog format > 0.
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # Delta revision: extend the base's chain bookkeeping.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            # Span = distance from chain base to the end of this rev's data.
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            # Classify the delta base; "prev" cases are further split by
            # whether prev coincides with p1 or p2.
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies its compression type.
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Turn the totals into averages in place (slot 2 becomes the mean).
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates sized to the widest value they must print.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Plain decimal format wide enough for `max`.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal + percentage format, with optional extra padding.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percent-of-total); 100% when total is zero/falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags  : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    merges    : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal    : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions     : ') + fmt2 % numrevs)
    ui.write(('    full      : ') + fmt % pcfmt(numfull, numrevs))
    ui.write(('    deltas    : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    full      : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write(('    deltas    : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-readable label for a chunk-type byte.
        if chunktype == 'empty':
            return '    %s     : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s)  : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s      : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks        : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size   : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length  : ') + fmt % avgchainlen)
    ui.write(('max chain length  : ') + fmt % maxchainlen)
    ui.write(('max chain reach   : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg)     : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg)             : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev  : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1  : ') + fmt2 % pcfmt(nump1prev,
                                                              numprev))
            ui.write(('    where prev = p2  : ') + fmt2 % pcfmt(nump2prev,
                                                              numprev))
            ui.write(('    other            : ') + fmt2 % pcfmt(numoprev,
                                                              numprev))
        if gdelta:
            ui.write(('deltas against p1    : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2    : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2264
2265
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Ordered pipeline: each stage transforms the tree produced by the
    # previous one, starting from the raw parse tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # showalways: stages always printed; showchanged: printed only when the
    # tree actually changed from the last printed one.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, keeping every intermediate tree by stage name so
    # --verify-optimized can compare 'analyzed' against 'optimized' later.
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the unoptimized and optimized trees and diff the
        # resulting revision sequences; exit 1 if they differ.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Print a unified-diff-style comparison of the two revision lists.
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the resulting revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2367
2368
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    # Optional file handle the wire-protocol server logs its I/O to.
    logfh = None

    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        # NOTE(review): buffering=1 on a binary-mode file is not supported
        # on py3 (it warns and falls back) -- confirm line buffering is
        # actually achieved here.
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    # Blocks forever serving the SSH wire protocol on stdio.
    s.serve_forever()
2404
2405
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # An omitted second revision defaults to the null revision.
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    # Take the working-copy lock while rewriting the dirstate parents.
    with repo.wlock():
        repo.setparents(node1, node2)
2422
2423
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    # Chain building is done through the win32 helper below, so this whole
    # command is Windows-only.
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    # No explicit SOURCE: fall back to the repo's 'default' path, which
    # requires an actual repository (optionalrepo may hand us None).
    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Supply the scheme's well-known port when the URL does not name one.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Deferred import: only reached after the Windows platform check above.
    from . import win32

    # CERT_NONE: we only want the peer's raw certificate bytes here, not a
    # verified connection, so certificate validation is deliberately off.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        # True -> DER-encoded certificate, as the win32 helper expects.
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass: probe only (build=False) to see if the chain is already
        # complete without touching Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second pass (build defaults to True) may fetch missing
            # intermediates/roots via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        # Always release the socket, even when connect/chain checks raise.
        s.close()
2484
2485
@command('debugsub',
         [('r', 'rev', '',
           _('revision to check'), _('REV'))],
         _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Resolve the revision (working directory when no REV is given) and dump
    # each subrepository entry in deterministic, sorted-path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2495
2496
@command('debugsuccessorssets',
         [('', 'closest', False, _('return closest successors sets only'))],
         _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    # How a changectx and a binary node are rendered for output.
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        # opts keys are native strs here (no byteskwargs conversion), hence r''.
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # NOTE(review): the indentation literal below may have had
                # whitespace collapsed by the paste — verify width upstream.
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2548
2549
@command('debugtemplate',
         [('r', 'rev', [], _('apply template on changesets'), _('REV')),
          ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
         _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
         optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    # opts keys are native strs (no byteskwargs conversion), hence the r''
    # prefixes throughout this function.
    if opts[r'rev']:
        # --rev requires a repository; optionalrepo may hand us None.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Parse -D KEY=VALUE definitions into a template property dict.  A
    # missing '=', an empty key, or the reserved name 'ui' are all rejected
    # through the same ValueError path.
    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        # Show the raw parse tree, and the alias-expanded tree when expansion
        # actually changed something.
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once against the -D definitions only.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            kwds, funcs = t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        ui.write(t.renderdefault(props))
    else:
        # Log template: render once per requested revision.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            kwds, funcs = displayer.t.symbolsuseddefault()
            ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
            ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2605
2606
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix typo in the output label ('respose' -> 'response'), matching the
    # label emitted by the sibling debuguiprompt command.
    ui.write(('response: %s\n') % r)
2613
2614
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer so tests can assert on what the prompt returned.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2621
2622
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Take the working-copy lock before the store lock (the required order),
    # so every cache can be rewritten consistently in one pass.
    with repo.wlock(), repo.lock():
        repo.updatecaches(full=True)
2627
2628
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin dispatcher: all of the analysis/upgrade logic lives in the
    # upgrade module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2652
2653
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    matcher = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(matcher), '\n')
    paths = list(repo[None].walk(matcher))
    if not paths:
        return
    # Optionally rewrite OS path separators to '/' for display.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        display = util.normpath
    else:
        display = lambda fn: fn
    # Size each column to the longest absolute / relative path.
    # NOTE(review): spacing in this format literal may have been collapsed
    # by the paste — verify column widths against upstream.
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max(len(p) for p in paths),
        max(len(matcher.rel(p)) for p in paths))
    for p in paths:
        line = fmt % (p, display(matcher.rel(p)),
                      'exact' if matcher.exact(p) else '')
        ui.write("%s\n" % line.rstrip())
2673
2674
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        # Render divergent nodes (when present) as "hex (phase)" pairs,
        # separated and trailed by a single space.
        divergent = entry.get('divergentnodes')
        if divergent:
            dnodes = ' '.join('%s (%s)' % (c.hex(), c.phasestr())
                              for c in divergent) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2684
2685
@command('debugwireargs',
         [('', 'three', '', 'three'),
          ('', 'four', '', 'four'),
          ('', 'five', '', 'five'),
         ] + cmdutil.remoteopts,
         _('REPO [OPTIONS]... [ONE [TWO]]'),
         norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    # Drop the generic remote options; only command-specific, truthy values
    # are forwarded over the wire.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    args = pycompat.strkwargs({k: v for k, v in opts.iteritems() if v})
    # run twice to check that we don't mess up the stream for the next command
    res1 = peer.debugwireargs(*vals, **args)
    res2 = peer.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2708
2709
def _parsewirelangblocks(fh):
    """Generate (action, lines) pairs from a wire-language script.

    A block is an unindented action line followed by zero or more indented
    payload lines.  Blank lines and b'#' comment lines are skipped.
    Raises error.Abort on an indented line that precedes any action.
    """
    action = None
    payload = []

    for raw in fh:
        stripped = raw.rstrip()

        # Skip blank lines and comments.
        if not stripped or stripped.startswith(b'#'):
            continue

        if stripped.startswith(' '):
            # Indented: payload belonging to the current action.
            if not action:
                raise error.Abort(_('indented line outside of block'))
            payload.append(stripped)
        else:
            # Unindented: a new action begins; flush the previous block.
            if action:
                yield action, payload
            action = stripped
            payload = []

    # Flush the trailing block, if any.
    if action:
        yield action, payload
2740
2741
2741 @command('debugwireproto',
2742 @command('debugwireproto',
2742 [
2743 [
2743 ('', 'localssh', False, _('start an SSH server for this repo')),
2744 ('', 'localssh', False, _('start an SSH server for this repo')),
2744 ('', 'peer', '', _('construct a specific version of the peer')),
2745 ('', 'peer', '', _('construct a specific version of the peer')),
2745 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2746 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2746 ('', 'nologhandshake', False,
2747 ('', 'nologhandshake', False,
2747 _('do not log I/O related to the peer handshake')),
2748 _('do not log I/O related to the peer handshake')),
2748 ] + cmdutil.remoteopts,
2749 ] + cmdutil.remoteopts,
2749 _('[PATH]'),
2750 _('[PATH]'),
2750 optionalrepo=True)
2751 optionalrepo=True)
2751 def debugwireproto(ui, repo, path=None, **opts):
2752 def debugwireproto(ui, repo, path=None, **opts):
2752 """send wire protocol commands to a server
2753 """send wire protocol commands to a server
2753
2754
2754 This command can be used to issue wire protocol commands to remote
2755 This command can be used to issue wire protocol commands to remote
2755 peers and to debug the raw data being exchanged.
2756 peers and to debug the raw data being exchanged.
2756
2757
2757 ``--localssh`` will start an SSH server against the current repository
2758 ``--localssh`` will start an SSH server against the current repository
2758 and connect to that. By default, the connection will perform a handshake
2759 and connect to that. By default, the connection will perform a handshake
2759 and establish an appropriate peer instance.
2760 and establish an appropriate peer instance.
2760
2761
2761 ``--peer`` can be used to bypass the handshake protocol and construct a
2762 ``--peer`` can be used to bypass the handshake protocol and construct a
2762 peer instance using the specified class type. Valid values are ``raw``,
2763 peer instance using the specified class type. Valid values are ``raw``,
2763 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2764 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2764 raw data payloads and don't support higher-level command actions.
2765 raw data payloads and don't support higher-level command actions.
2765
2766
2766 ``--noreadstderr`` can be used to disable automatic reading from stderr
2767 ``--noreadstderr`` can be used to disable automatic reading from stderr
2767 of the peer (for SSH connections only). Disabling automatic reading of
2768 of the peer (for SSH connections only). Disabling automatic reading of
2768 stderr is useful for making output more deterministic.
2769 stderr is useful for making output more deterministic.
2769
2770
2770 Commands are issued via a mini language which is specified via stdin.
2771 Commands are issued via a mini language which is specified via stdin.
2771 The language consists of individual actions to perform. An action is
2772 The language consists of individual actions to perform. An action is
2772 defined by a block. A block is defined as a line with no leading
2773 defined by a block. A block is defined as a line with no leading
2773 space followed by 0 or more lines with leading space. Blocks are
2774 space followed by 0 or more lines with leading space. Blocks are
2774 effectively a high-level command with additional metadata.
2775 effectively a high-level command with additional metadata.
2775
2776
2776 Lines beginning with ``#`` are ignored.
2777 Lines beginning with ``#`` are ignored.
2777
2778
2778 The following sections denote available actions.
2779 The following sections denote available actions.
2779
2780
2780 raw
2781 raw
2781 ---
2782 ---
2782
2783
2783 Send raw data to the server.
2784 Send raw data to the server.
2784
2785
2785 The block payload contains the raw data to send as one atomic send
2786 The block payload contains the raw data to send as one atomic send
2786 operation. The data may not actually be delivered in a single system
2787 operation. The data may not actually be delivered in a single system
2787 call: it depends on the abilities of the transport being used.
2788 call: it depends on the abilities of the transport being used.
2788
2789
2789 Each line in the block is de-indented and concatenated. Then, that
2790 Each line in the block is de-indented and concatenated. Then, that
2790 value is evaluated as a Python b'' literal. This allows the use of
2791 value is evaluated as a Python b'' literal. This allows the use of
2791 backslash escaping, etc.
2792 backslash escaping, etc.
2792
2793
2793 raw+
2794 raw+
2794 ----
2795 ----
2795
2796
2796 Behaves like ``raw`` except flushes output afterwards.
2797 Behaves like ``raw`` except flushes output afterwards.
2797
2798
2798 command <X>
2799 command <X>
2799 -----------
2800 -----------
2800
2801
2801 Send a request to run a named command, whose name follows the ``command``
2802 Send a request to run a named command, whose name follows the ``command``
2802 string.
2803 string.
2803
2804
2804 Arguments to the command are defined as lines in this block. The format of
2805 Arguments to the command are defined as lines in this block. The format of
2805 each line is ``<key> <value>``. e.g.::
2806 each line is ``<key> <value>``. e.g.::
2806
2807
2807 command listkeys
2808 command listkeys
2808 namespace bookmarks
2809 namespace bookmarks
2809
2810
2810 If the value begins with ``eval:``, it will be interpreted as a Python
2811 If the value begins with ``eval:``, it will be interpreted as a Python
2811 literal expression. Otherwise values are interpreted as Python b'' literals.
2812 literal expression. Otherwise values are interpreted as Python b'' literals.
2812 This allows sending complex types and encoding special byte sequences via
2813 This allows sending complex types and encoding special byte sequences via
2813 backslash escaping.
2814 backslash escaping.
2814
2815
2815 The following arguments have special meaning:
2816 The following arguments have special meaning:
2816
2817
2817 ``PUSHFILE``
2818 ``PUSHFILE``
2818 When defined, the *push* mechanism of the peer will be used instead
2819 When defined, the *push* mechanism of the peer will be used instead
2819 of the static request-response mechanism and the content of the
2820 of the static request-response mechanism and the content of the
2820 file specified in the value of this argument will be sent as the
2821 file specified in the value of this argument will be sent as the
2821 command payload.
2822 command payload.
2822
2823
2823 This can be used to submit a local bundle file to the remote.
2824 This can be used to submit a local bundle file to the remote.
2824
2825
2825 batchbegin
2826 batchbegin
2826 ----------
2827 ----------
2827
2828
2828 Instruct the peer to begin a batched send.
2829 Instruct the peer to begin a batched send.
2829
2830
2830 All ``command`` blocks are queued for execution until the next
2831 All ``command`` blocks are queued for execution until the next
2831 ``batchsubmit`` block.
2832 ``batchsubmit`` block.
2832
2833
2833 batchsubmit
2834 batchsubmit
2834 -----------
2835 -----------
2835
2836
2836 Submit previously queued ``command`` blocks as a batch request.
2837 Submit previously queued ``command`` blocks as a batch request.
2837
2838
2838 This action MUST be paired with a ``batchbegin`` action.
2839 This action MUST be paired with a ``batchbegin`` action.
2839
2840
2840 httprequest <method> <path>
2841 httprequest <method> <path>
2841 ---------------------------
2842 ---------------------------
2842
2843
2843 (HTTP peer only)
2844 (HTTP peer only)
2844
2845
2845 Send an HTTP request to the peer.
2846 Send an HTTP request to the peer.
2846
2847
2847 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2848 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2848
2849
2849 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2850 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2850 headers to add to the request. e.g. ``Accept: foo``.
2851 headers to add to the request. e.g. ``Accept: foo``.
2851
2852
2852 The following arguments are special:
2853 The following arguments are special:
2853
2854
2854 ``BODYFILE``
2855 ``BODYFILE``
2855 The content of the file defined as the value to this argument will be
2856 The content of the file defined as the value to this argument will be
2856 transferred verbatim as the HTTP request body.
2857 transferred verbatim as the HTTP request body.
2857
2858
2858 ``frame <type> <flags> <payload>``
2859 ``frame <type> <flags> <payload>``
2859 Send a unified protocol frame as part of the request body.
2860 Send a unified protocol frame as part of the request body.
2860
2861
2861 All frames will be collected and sent as the body to the HTTP
2862 All frames will be collected and sent as the body to the HTTP
2862 request.
2863 request.
2863
2864
2864 close
2865 close
2865 -----
2866 -----
2866
2867
2867 Close the connection to the server.
2868 Close the connection to the server.
2868
2869
2869 flush
2870 flush
2870 -----
2871 -----
2871
2872
2872 Flush data written to the server.
2873 Flush data written to the server.
2873
2874
2874 readavailable
2875 readavailable
2875 -------------
2876 -------------
2876
2877
2877 Close the write end of the connection and read all available data from
2878 Close the write end of the connection and read all available data from
2878 the server.
2879 the server.
2879
2880
2880 If the connection to the server encompasses multiple pipes, we poll both
2881 If the connection to the server encompasses multiple pipes, we poll both
2881 pipes and read available data.
2882 pipes and read available data.
2882
2883
2883 readline
2884 readline
2884 --------
2885 --------
2885
2886
2886 Read a line of output from the server. If there are multiple output
2887 Read a line of output from the server. If there are multiple output
2887 pipes, reads only the main pipe.
2888 pipes, reads only the main pipe.
2888
2889
2889 ereadline
2890 ereadline
2890 ---------
2891 ---------
2891
2892
2892 Like ``readline``, but read from the stderr pipe, if available.
2893 Like ``readline``, but read from the stderr pipe, if available.
2893
2894
2894 read <X>
2895 read <X>
2895 --------
2896 --------
2896
2897
2897 ``read()`` N bytes from the server's main output pipe.
2898 ``read()`` N bytes from the server's main output pipe.
2898
2899
2899 eread <X>
2900 eread <X>
2900 ---------
2901 ---------
2901
2902
2902 ``read()`` N bytes from the server's stderr pipe, if available.
2903 ``read()`` N bytes from the server's stderr pipe, if available.
2903
2904
2904 Specifying Unified Frame-Based Protocol Frames
2905 Specifying Unified Frame-Based Protocol Frames
2905 ----------------------------------------------
2906 ----------------------------------------------
2906
2907
2907 It is possible to emit a *Unified Frame-Based Protocol* by using special
2908 It is possible to emit a *Unified Frame-Based Protocol* by using special
2908 syntax.
2909 syntax.
2909
2910
2910 A frame is composed as a type, flags, and payload. These can be parsed
2911 A frame is composed as a type, flags, and payload. These can be parsed
2911 from a string of the form:
2912 from a string of the form:
2912
2913
2913 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2914 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2914
2915
2915 ``request-id`` and ``stream-id`` are integers defining the request and
2916 ``request-id`` and ``stream-id`` are integers defining the request and
2916 stream identifiers.
2917 stream identifiers.
2917
2918
2918 ``type`` can be an integer value for the frame type or the string name
2919 ``type`` can be an integer value for the frame type or the string name
2919 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2920 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2920 ``command-name``.
2921 ``command-name``.
2921
2922
2922 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2923 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2923 components. Each component (and there can be just one) can be an integer
2924 components. Each component (and there can be just one) can be an integer
2924 or a flag name for stream flags or frame flags, respectively. Values are
2925 or a flag name for stream flags or frame flags, respectively. Values are
2925 resolved to integers and then bitwise OR'd together.
2926 resolved to integers and then bitwise OR'd together.
2926
2927
2927 ``payload`` represents the raw frame payload. If it begins with
2928 ``payload`` represents the raw frame payload. If it begins with
2928 ``cbor:``, the following string is evaluated as Python code and the
2929 ``cbor:``, the following string is evaluated as Python code and the
2929 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2930 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2930 as a Python byte string literal.
2931 as a Python byte string literal.
2931 """
2932 """
2932 opts = pycompat.byteskwargs(opts)
2933 opts = pycompat.byteskwargs(opts)
2933
2934
2934 if opts['localssh'] and not repo:
2935 if opts['localssh'] and not repo:
2935 raise error.Abort(_('--localssh requires a repository'))
2936 raise error.Abort(_('--localssh requires a repository'))
2936
2937
2937 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2938 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
2938 raise error.Abort(_('invalid value for --peer'),
2939 raise error.Abort(_('invalid value for --peer'),
2939 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2940 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2940
2941
2941 if path and opts['localssh']:
2942 if path and opts['localssh']:
2942 raise error.Abort(_('cannot specify --localssh with an explicit '
2943 raise error.Abort(_('cannot specify --localssh with an explicit '
2943 'path'))
2944 'path'))
2944
2945
2945 if ui.interactive():
2946 if ui.interactive():
2946 ui.write(_('(waiting for commands on stdin)\n'))
2947 ui.write(_('(waiting for commands on stdin)\n'))
2947
2948
2948 blocks = list(_parsewirelangblocks(ui.fin))
2949 blocks = list(_parsewirelangblocks(ui.fin))
2949
2950
2950 proc = None
2951 proc = None
2951 stdin = None
2952 stdin = None
2952 stdout = None
2953 stdout = None
2953 stderr = None
2954 stderr = None
2954 opener = None
2955 opener = None
2955
2956
2956 if opts['localssh']:
2957 if opts['localssh']:
2957 # We start the SSH server in its own process so there is process
2958 # We start the SSH server in its own process so there is process
2958 # separation. This prevents a whole class of potential bugs around
2959 # separation. This prevents a whole class of potential bugs around
2959 # shared state from interfering with server operation.
2960 # shared state from interfering with server operation.
2960 args = procutil.hgcmd() + [
2961 args = procutil.hgcmd() + [
2961 '-R', repo.root,
2962 '-R', repo.root,
2962 'debugserve', '--sshstdio',
2963 'debugserve', '--sshstdio',
2963 ]
2964 ]
2964 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2965 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2965 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2966 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2966 bufsize=0)
2967 bufsize=0)
2967
2968
2968 stdin = proc.stdin
2969 stdin = proc.stdin
2969 stdout = proc.stdout
2970 stdout = proc.stdout
2970 stderr = proc.stderr
2971 stderr = proc.stderr
2971
2972
2972 # We turn the pipes into observers so we can log I/O.
2973 # We turn the pipes into observers so we can log I/O.
2973 if ui.verbose or opts['peer'] == 'raw':
2974 if ui.verbose or opts['peer'] == 'raw':
2974 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2975 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2975 logdata=True)
2976 logdata=True)
2976 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2977 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2977 logdata=True)
2978 logdata=True)
2978 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2979 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2979 logdata=True)
2980 logdata=True)
2980
2981
2981 # --localssh also implies the peer connection settings.
2982 # --localssh also implies the peer connection settings.
2982
2983
2983 url = 'ssh://localserver'
2984 url = 'ssh://localserver'
2984 autoreadstderr = not opts['noreadstderr']
2985 autoreadstderr = not opts['noreadstderr']
2985
2986
2986 if opts['peer'] == 'ssh1':
2987 if opts['peer'] == 'ssh1':
2987 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2988 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2988 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2989 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2989 None, autoreadstderr=autoreadstderr)
2990 None, autoreadstderr=autoreadstderr)
2990 elif opts['peer'] == 'ssh2':
2991 elif opts['peer'] == 'ssh2':
2991 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2992 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2992 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2993 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2993 None, autoreadstderr=autoreadstderr)
2994 None, autoreadstderr=autoreadstderr)
2994 elif opts['peer'] == 'raw':
2995 elif opts['peer'] == 'raw':
2995 ui.write(_('using raw connection to peer\n'))
2996 ui.write(_('using raw connection to peer\n'))
2996 peer = None
2997 peer = None
2997 else:
2998 else:
2998 ui.write(_('creating ssh peer from handshake results\n'))
2999 ui.write(_('creating ssh peer from handshake results\n'))
2999 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3000 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3000 autoreadstderr=autoreadstderr)
3001 autoreadstderr=autoreadstderr)
3001
3002
3002 elif path:
3003 elif path:
3003 # We bypass hg.peer() so we can proxy the sockets.
3004 # We bypass hg.peer() so we can proxy the sockets.
3004 # TODO consider not doing this because we skip
3005 # TODO consider not doing this because we skip
3005 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3006 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3006 u = util.url(path)
3007 u = util.url(path)
3007 if u.scheme != 'http':
3008 if u.scheme != 'http':
3008 raise error.Abort(_('only http:// paths are currently supported'))
3009 raise error.Abort(_('only http:// paths are currently supported'))
3009
3010
3010 url, authinfo = u.authinfo()
3011 url, authinfo = u.authinfo()
3011 openerargs = {
3012 openerargs = {
3012 r'useragent': b'Mercurial debugwireproto',
3013 r'useragent': b'Mercurial debugwireproto',
3013 }
3014 }
3014
3015
3015 # Turn pipes/sockets into observers so we can log I/O.
3016 # Turn pipes/sockets into observers so we can log I/O.
3016 if ui.verbose:
3017 if ui.verbose:
3017 openerargs.update({
3018 openerargs.update({
3018 r'loggingfh': ui,
3019 r'loggingfh': ui,
3019 r'loggingname': b's',
3020 r'loggingname': b's',
3020 r'loggingopts': {
3021 r'loggingopts': {
3021 r'logdata': True,
3022 r'logdata': True,
3022 r'logdataapis': False,
3023 r'logdataapis': False,
3023 },
3024 },
3024 })
3025 })
3025
3026
3026 if ui.debugflag:
3027 if ui.debugflag:
3027 openerargs[r'loggingopts'][r'logdataapis'] = True
3028 openerargs[r'loggingopts'][r'logdataapis'] = True
3028
3029
3029 # Don't send default headers when in raw mode. This allows us to
3030 # Don't send default headers when in raw mode. This allows us to
3030 # bypass most of the behavior of our URL handling code so we can
3031 # bypass most of the behavior of our URL handling code so we can
3031 # have near complete control over what's sent on the wire.
3032 # have near complete control over what's sent on the wire.
3032 if opts['peer'] == 'raw':
3033 if opts['peer'] == 'raw':
3033 openerargs[r'sendaccept'] = False
3034 openerargs[r'sendaccept'] = False
3034
3035
3035 opener = urlmod.opener(ui, authinfo, **openerargs)
3036 opener = urlmod.opener(ui, authinfo, **openerargs)
3036
3037
3037 if opts['peer'] == 'http2':
3038 if opts['peer'] == 'http2':
3038 ui.write(_('creating http peer for wire protocol version 2\n'))
3039 ui.write(_('creating http peer for wire protocol version 2\n'))
3039 # We go through makepeer() because we need an API descriptor for
3040 # We go through makepeer() because we need an API descriptor for
3040 # the peer instance to be useful.
3041 # the peer instance to be useful.
3041 with ui.configoverride({
3042 with ui.configoverride({
3042 ('experimental', 'httppeer.advertise-v2'): True}):
3043 ('experimental', 'httppeer.advertise-v2'): True}):
3043 if opts['nologhandshake']:
3044 if opts['nologhandshake']:
3044 ui.pushbuffer()
3045 ui.pushbuffer()
3045
3046
3046 peer = httppeer.makepeer(ui, path, opener=opener)
3047 peer = httppeer.makepeer(ui, path, opener=opener)
3047
3048
3048 if opts['nologhandshake']:
3049 if opts['nologhandshake']:
3049 ui.popbuffer()
3050 ui.popbuffer()
3050
3051
3051 if not isinstance(peer, httppeer.httpv2peer):
3052 if not isinstance(peer, httppeer.httpv2peer):
3052 raise error.Abort(_('could not instantiate HTTP peer for '
3053 raise error.Abort(_('could not instantiate HTTP peer for '
3053 'wire protocol version 2'),
3054 'wire protocol version 2'),
3054 hint=_('the server may not have the feature '
3055 hint=_('the server may not have the feature '
3055 'enabled or is not allowing this '
3056 'enabled or is not allowing this '
3056 'client version'))
3057 'client version'))
3057
3058
3058 elif opts['peer'] == 'raw':
3059 elif opts['peer'] == 'raw':
3059 ui.write(_('using raw connection to peer\n'))
3060 ui.write(_('using raw connection to peer\n'))
3060 peer = None
3061 peer = None
3061 elif opts['peer']:
3062 elif opts['peer']:
3062 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3063 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3063 opts['peer'])
3064 opts['peer'])
3064 else:
3065 else:
3065 peer = httppeer.makepeer(ui, path, opener=opener)
3066 peer = httppeer.makepeer(ui, path, opener=opener)
3066
3067
3067 # We /could/ populate stdin/stdout with sock.makefile()...
3068 # We /could/ populate stdin/stdout with sock.makefile()...
3068 else:
3069 else:
3069 raise error.Abort(_('unsupported connection configuration'))
3070 raise error.Abort(_('unsupported connection configuration'))
3070
3071
3071 batchedcommands = None
3072 batchedcommands = None
3072
3073
3073 # Now perform actions based on the parsed wire language instructions.
3074 # Now perform actions based on the parsed wire language instructions.
3074 for action, lines in blocks:
3075 for action, lines in blocks:
3075 if action in ('raw', 'raw+'):
3076 if action in ('raw', 'raw+'):
3076 if not stdin:
3077 if not stdin:
3077 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3078 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3078
3079
3079 # Concatenate the data together.
3080 # Concatenate the data together.
3080 data = ''.join(l.lstrip() for l in lines)
3081 data = ''.join(l.lstrip() for l in lines)
3081 data = stringutil.unescapestr(data)
3082 data = stringutil.unescapestr(data)
3082 stdin.write(data)
3083 stdin.write(data)
3083
3084
3084 if action == 'raw+':
3085 if action == 'raw+':
3085 stdin.flush()
3086 stdin.flush()
3086 elif action == 'flush':
3087 elif action == 'flush':
3087 if not stdin:
3088 if not stdin:
3088 raise error.Abort(_('cannot call flush on this peer'))
3089 raise error.Abort(_('cannot call flush on this peer'))
3089 stdin.flush()
3090 stdin.flush()
3090 elif action.startswith('command'):
3091 elif action.startswith('command'):
3091 if not peer:
3092 if not peer:
3092 raise error.Abort(_('cannot send commands unless peer instance '
3093 raise error.Abort(_('cannot send commands unless peer instance '
3093 'is available'))
3094 'is available'))
3094
3095
3095 command = action.split(' ', 1)[1]
3096 command = action.split(' ', 1)[1]
3096
3097
3097 args = {}
3098 args = {}
3098 for line in lines:
3099 for line in lines:
3099 # We need to allow empty values.
3100 # We need to allow empty values.
3100 fields = line.lstrip().split(' ', 1)
3101 fields = line.lstrip().split(' ', 1)
3101 if len(fields) == 1:
3102 if len(fields) == 1:
3102 key = fields[0]
3103 key = fields[0]
3103 value = ''
3104 value = ''
3104 else:
3105 else:
3105 key, value = fields
3106 key, value = fields
3106
3107
3107 if value.startswith('eval:'):
3108 if value.startswith('eval:'):
3108 value = stringutil.evalpythonliteral(value[5:])
3109 value = stringutil.evalpythonliteral(value[5:])
3109 else:
3110 else:
3110 value = stringutil.unescapestr(value)
3111 value = stringutil.unescapestr(value)
3111
3112
3112 args[key] = value
3113 args[key] = value
3113
3114
3114 if batchedcommands is not None:
3115 if batchedcommands is not None:
3115 batchedcommands.append((command, args))
3116 batchedcommands.append((command, args))
3116 continue
3117 continue
3117
3118
3118 ui.status(_('sending %s command\n') % command)
3119 ui.status(_('sending %s command\n') % command)
3119
3120
3120 if 'PUSHFILE' in args:
3121 if 'PUSHFILE' in args:
3121 with open(args['PUSHFILE'], r'rb') as fh:
3122 with open(args['PUSHFILE'], r'rb') as fh:
3122 del args['PUSHFILE']
3123 del args['PUSHFILE']
3123 res, output = peer._callpush(command, fh,
3124 res, output = peer._callpush(command, fh,
3124 **pycompat.strkwargs(args))
3125 **pycompat.strkwargs(args))
3125 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3126 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3126 ui.status(_('remote output: %s\n') %
3127 ui.status(_('remote output: %s\n') %
3127 stringutil.escapestr(output))
3128 stringutil.escapestr(output))
3128 else:
3129 else:
3129 with peer.commandexecutor() as e:
3130 with peer.commandexecutor() as e:
3130 res = e.callcommand(command, args).result()
3131 res = e.callcommand(command, args).result()
3131
3132
3132 if isinstance(res, wireprotov2peer.commandresponse):
3133 if isinstance(res, wireprotov2peer.commandresponse):
3133 val = list(res.cborobjects())
3134 val = list(res.cborobjects())
3134 ui.status(_('response: %s\n') %
3135 ui.status(_('response: %s\n') %
3135 stringutil.pprint(val, bprefix=True))
3136 stringutil.pprint(val, bprefix=True))
3136
3137
3137 else:
3138 else:
3138 ui.status(_('response: %s\n') %
3139 ui.status(_('response: %s\n') %
3139 stringutil.pprint(res, bprefix=True))
3140 stringutil.pprint(res, bprefix=True))
3140
3141
3141 elif action == 'batchbegin':
3142 elif action == 'batchbegin':
3142 if batchedcommands is not None:
3143 if batchedcommands is not None:
3143 raise error.Abort(_('nested batchbegin not allowed'))
3144 raise error.Abort(_('nested batchbegin not allowed'))
3144
3145
3145 batchedcommands = []
3146 batchedcommands = []
3146 elif action == 'batchsubmit':
3147 elif action == 'batchsubmit':
3147 # There is a batching API we could go through. But it would be
3148 # There is a batching API we could go through. But it would be
3148 # difficult to normalize requests into function calls. It is easier
3149 # difficult to normalize requests into function calls. It is easier
3149 # to bypass this layer and normalize to commands + args.
3150 # to bypass this layer and normalize to commands + args.
3150 ui.status(_('sending batch with %d sub-commands\n') %
3151 ui.status(_('sending batch with %d sub-commands\n') %
3151 len(batchedcommands))
3152 len(batchedcommands))
3152 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3153 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3153 ui.status(_('response #%d: %s\n') %
3154 ui.status(_('response #%d: %s\n') %
3154 (i, stringutil.escapestr(chunk)))
3155 (i, stringutil.escapestr(chunk)))
3155
3156
3156 batchedcommands = None
3157 batchedcommands = None
3157
3158
3158 elif action.startswith('httprequest '):
3159 elif action.startswith('httprequest '):
3159 if not opener:
3160 if not opener:
3160 raise error.Abort(_('cannot use httprequest without an HTTP '
3161 raise error.Abort(_('cannot use httprequest without an HTTP '
3161 'peer'))
3162 'peer'))
3162
3163
3163 request = action.split(' ', 2)
3164 request = action.split(' ', 2)
3164 if len(request) != 3:
3165 if len(request) != 3:
3165 raise error.Abort(_('invalid httprequest: expected format is '
3166 raise error.Abort(_('invalid httprequest: expected format is '
3166 '"httprequest <method> <path>'))
3167 '"httprequest <method> <path>'))
3167
3168
3168 method, httppath = request[1:]
3169 method, httppath = request[1:]
3169 headers = {}
3170 headers = {}
3170 body = None
3171 body = None
3171 frames = []
3172 frames = []
3172 for line in lines:
3173 for line in lines:
3173 line = line.lstrip()
3174 line = line.lstrip()
3174 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3175 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3175 if m:
3176 if m:
3176 headers[m.group(1)] = m.group(2)
3177 headers[m.group(1)] = m.group(2)
3177 continue
3178 continue
3178
3179
3179 if line.startswith(b'BODYFILE '):
3180 if line.startswith(b'BODYFILE '):
3180 with open(line.split(b' ', 1), 'rb') as fh:
3181 with open(line.split(b' ', 1), 'rb') as fh:
3181 body = fh.read()
3182 body = fh.read()
3182 elif line.startswith(b'frame '):
3183 elif line.startswith(b'frame '):
3183 frame = wireprotoframing.makeframefromhumanstring(
3184 frame = wireprotoframing.makeframefromhumanstring(
3184 line[len(b'frame '):])
3185 line[len(b'frame '):])
3185
3186
3186 frames.append(frame)
3187 frames.append(frame)
3187 else:
3188 else:
3188 raise error.Abort(_('unknown argument to httprequest: %s') %
3189 raise error.Abort(_('unknown argument to httprequest: %s') %
3189 line)
3190 line)
3190
3191
3191 url = path + httppath
3192 url = path + httppath
3192
3193
3193 if frames:
3194 if frames:
3194 body = b''.join(bytes(f) for f in frames)
3195 body = b''.join(bytes(f) for f in frames)
3195
3196
3196 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3197 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3197
3198
3198 # urllib.Request insists on using has_data() as a proxy for
3199 # urllib.Request insists on using has_data() as a proxy for
3199 # determining the request method. Override that to use our
3200 # determining the request method. Override that to use our
3200 # explicitly requested method.
3201 # explicitly requested method.
3201 req.get_method = lambda: method
3202 req.get_method = lambda: method
3202
3203
3203 try:
3204 try:
3204 res = opener.open(req)
3205 res = opener.open(req)
3205 body = res.read()
3206 body = res.read()
3206 except util.urlerr.urlerror as e:
3207 except util.urlerr.urlerror as e:
3207 e.read()
3208 e.read()
3208 continue
3209 continue
3209
3210
3210 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3211 if res.headers.get('Content-Type') == 'application/mercurial-cbor':
3211 ui.write(_('cbor> %s\n') %
3212 ui.write(_('cbor> %s\n') %
3212 stringutil.pprint(cbor.loads(body), bprefix=True))
3213 stringutil.pprint(cbor.loads(body), bprefix=True))
3213
3214
3214 elif action == 'close':
3215 elif action == 'close':
3215 peer.close()
3216 peer.close()
3216 elif action == 'readavailable':
3217 elif action == 'readavailable':
3217 if not stdout or not stderr:
3218 if not stdout or not stderr:
3218 raise error.Abort(_('readavailable not available on this peer'))
3219 raise error.Abort(_('readavailable not available on this peer'))
3219
3220
3220 stdin.close()
3221 stdin.close()
3221 stdout.read()
3222 stdout.read()
3222 stderr.read()
3223 stderr.read()
3223
3224
3224 elif action == 'readline':
3225 elif action == 'readline':
3225 if not stdout:
3226 if not stdout:
3226 raise error.Abort(_('readline not available on this peer'))
3227 raise error.Abort(_('readline not available on this peer'))
3227 stdout.readline()
3228 stdout.readline()
3228 elif action == 'ereadline':
3229 elif action == 'ereadline':
3229 if not stderr:
3230 if not stderr:
3230 raise error.Abort(_('ereadline not available on this peer'))
3231 raise error.Abort(_('ereadline not available on this peer'))
3231 stderr.readline()
3232 stderr.readline()
3232 elif action.startswith('read '):
3233 elif action.startswith('read '):
3233 count = int(action.split(' ', 1)[1])
3234 count = int(action.split(' ', 1)[1])
3234 if not stdout:
3235 if not stdout:
3235 raise error.Abort(_('read not available on this peer'))
3236 raise error.Abort(_('read not available on this peer'))
3236 stdout.read(count)
3237 stdout.read(count)
3237 elif action.startswith('eread '):
3238 elif action.startswith('eread '):
3238 count = int(action.split(' ', 1)[1])
3239 count = int(action.split(' ', 1)[1])
3239 if not stderr:
3240 if not stderr:
3240 raise error.Abort(_('eread not available on this peer'))
3241 raise error.Abort(_('eread not available on this peer'))
3241 stderr.read(count)
3242 stderr.read(count)
3242 else:
3243 else:
3243 raise error.Abort(_('unknown action: %s') % action)
3244 raise error.Abort(_('unknown action: %s') % action)
3244
3245
3245 if batchedcommands is not None:
3246 if batchedcommands is not None:
3246 raise error.Abort(_('unclosed "batchbegin" request'))
3247 raise error.Abort(_('unclosed "batchbegin" request'))
3247
3248
3248 if peer:
3249 if peer:
3249 peer.close()
3250 peer.close()
3250
3251
3251 if proc:
3252 if proc:
3252 proc.kill()
3253 proc.kill()
@@ -1,556 +1,557
1 # fileset.py - file set queries for mercurial
1 # fileset.py - file set queries for mercurial
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import re
11 import re
12
12
13 from .i18n import _
13 from .i18n import _
14 from . import (
14 from . import (
15 error,
15 error,
16 filesetlang,
16 filesetlang,
17 match as matchmod,
17 match as matchmod,
18 merge,
18 merge,
19 pycompat,
19 pycompat,
20 registrar,
20 registrar,
21 scmutil,
21 scmutil,
22 util,
22 util,
23 )
23 )
24 from .utils import (
24 from .utils import (
25 stringutil,
25 stringutil,
26 )
26 )
27
27
# helpers for processing parsed tree: thin aliases re-exported from
# filesetlang so the evaluation code below can stay terse
getsymbol = filesetlang.getsymbol
getstring = filesetlang.getstring
_getkindpat = filesetlang.getkindpat
getpattern = filesetlang.getpattern
getargs = filesetlang.getargs
34
34
def getmatch(mctx, x):
    """Evaluate parsed tree node ``x`` and return a matcher object.

    ``x`` is a tuple of (operator-name, operands...); a missing/empty node
    is a parse error. Dispatches through the module-level ``methods`` table.
    """
    if not x:
        raise error.ParseError(_("missing argument"))
    op, operands = x[0], x[1:]
    return methods[op](mctx, *operands)
39
39
def stringmatch(mctx, x):
    """Build a matcher that selects files matching the single pattern ``x``."""
    patterns = [x]
    return mctx.matcher(patterns)
42
42
def kindpatmatch(mctx, x, y):
    """Build a matcher from a ``kind:pattern`` tree node."""
    # normalize the (kind, pattern) pair back into a single pattern string
    pat = _getkindpat(x, y, matchmod.allpatternkinds,
                      _("pattern must be a string"))
    return stringmatch(mctx, pat)
46
46
def andmatch(mctx, x, y):
    """Matcher selecting files matched by both subexpressions."""
    left = getmatch(mctx, x)
    right = getmatch(mctx, y)
    return matchmod.intersectmatchers(left, right)
51
51
def ormatch(mctx, *xs):
    """Matcher selecting files matched by any of the subexpressions."""
    matchers = []
    for subtree in xs:
        matchers.append(getmatch(mctx, subtree))
    return matchmod.unionmatcher(matchers)
55
55
def notmatch(mctx, x):
    """Matcher selecting files NOT matched by the subexpression."""
    inner = getmatch(mctx, x)
    def negated(f):
        return not inner(f)
    return mctx.predicate(negated, predrepr=('<not %r>', inner))
59
59
def minusmatch(mctx, x, y):
    """Matcher selecting files matched by ``x`` but not by ``y``."""
    included = getmatch(mctx, x)
    excluded = getmatch(mctx, y)
    return matchmod.differencematcher(included, excluded)
64
64
def listmatch(mctx, *xs):
    # A bare comma-separated list is only meaningful as function arguments;
    # reaching this method means it was used as a standalone expression.
    raise error.ParseError(_("can't use a list in this context"),
                           hint=_('see \'hg help "filesets.x or y"\''))
68
68
def func(mctx, a, b):
    """Evaluate a function-call node: look up symbol ``a``, apply it to ``b``.

    Raises UnknownIdentifier when the name is not registered; the error
    carries the list of documented predicates as suggestions.
    """
    funcname = getsymbol(a)
    if funcname in symbols:
        return symbols[funcname](mctx, b)

    # Suggest only documented predicates: registrations without a docstring
    # are internal helpers and intentionally hidden from the hint.
    # (Was: keep = lambda fn: ... — PEP 8 E731 discourages assigning a
    # lambda to a name; the condition is inlined instead.)
    syms = [s for (s, fn) in symbols.items()
            if getattr(fn, '__doc__', None) is not None]
    raise error.UnknownIdentifier(funcname, syms)
78
78
# symbols are callable like:
#  fun(mctx, x)
# with:
#  mctx - current matchctx instance
#  x - argument in tree form
symbols = filesetlang.symbols

# filesets using matchctx.status()
# NOTE(review): presumably populated by @predicate(..., callstatus=True)
# registrations below — confirm against registrar.filesetpredicate
_statuscallers = set()

predicate = registrar.filesetpredicate()
90
90
@predicate('modified()', callstatus=True)
def modified(mctx, x):
    """File that is modified according to :hg:`status`.
    """
    # i18n: "modified" is a keyword
    getargs(x, 0, 0, _("modified takes no arguments"))
    # snapshot status once so the predicate itself is a cheap set lookup
    modifiedset = set(mctx.status().modified)
    return mctx.predicate(modifiedset.__contains__, predrepr='modified')
99
99
@predicate('added()', callstatus=True)
def added(mctx, x):
    """File that is added according to :hg:`status`.
    """
    # i18n: "added" is a keyword
    getargs(x, 0, 0, _("added takes no arguments"))
    # snapshot status once so the predicate itself is a cheap set lookup
    addedset = set(mctx.status().added)
    return mctx.predicate(addedset.__contains__, predrepr='added')
108
108
@predicate('removed()', callstatus=True)
def removed(mctx, x):
    """File that is removed according to :hg:`status`.
    """
    # i18n: "removed" is a keyword
    getargs(x, 0, 0, _("removed takes no arguments"))
    # snapshot status once so the predicate itself is a cheap set lookup
    removedset = set(mctx.status().removed)
    return mctx.predicate(removedset.__contains__, predrepr='removed')
117
117
@predicate('deleted()', callstatus=True)
def deleted(mctx, x):
    """Alias for ``missing()``.
    """
    # i18n: "deleted" is a keyword
    getargs(x, 0, 0, _("deleted takes no arguments"))
    # "deleted" files are those reported in the status deleted list
    deletedset = set(mctx.status().deleted)
    return mctx.predicate(deletedset.__contains__, predrepr='deleted')
126
126
@predicate('missing()', callstatus=True)
def missing(mctx, x):
    """File that is missing according to :hg:`status`.
    """
    # i18n: "missing" is a keyword
    getargs(x, 0, 0, _("missing takes no arguments"))
    # missing() is backed by the status *deleted* list, hence the
    # 'deleted' predrepr below
    deletedset = set(mctx.status().deleted)
    return mctx.predicate(deletedset.__contains__, predrepr='deleted')
135
135
@predicate('unknown()', callstatus=True)
def unknown(mctx, x):
    """File that is unknown according to :hg:`status`."""
    # i18n: "unknown" is a keyword
    getargs(x, 0, 0, _("unknown takes no arguments"))
    unknownset = set(mctx.status().unknown)
    return mctx.predicate(lambda f: f in unknownset, predrepr='unknown')
143
143
@predicate('ignored()', callstatus=True)
def ignored(mctx, x):
    """File that is ignored according to :hg:`status`."""
    # i18n: "ignored" is a keyword
    getargs(x, 0, 0, _("ignored takes no arguments"))
    ignoredset = set(mctx.status().ignored)
    return mctx.predicate(lambda f: f in ignoredset, predrepr='ignored')
151
151
@predicate('clean()', callstatus=True)
def clean(mctx, x):
    """File that is clean according to :hg:`status`."""
    # i18n: "clean" is a keyword
    getargs(x, 0, 0, _("clean takes no arguments"))
    cleanset = set(mctx.status().clean)
    return mctx.predicate(lambda f: f in cleanset, predrepr='clean')
160
160
@predicate('tracked()')
def tracked(mctx, x):
    """File that is under Mercurial control."""
    # i18n: "tracked" is a keyword
    getargs(x, 0, 0, _("tracked takes no arguments"))
    ctx = mctx.ctx
    # membership in the changectx means the file is tracked at that revision
    return mctx.predicate(lambda f: f in ctx, predrepr='tracked')
167
167
@predicate('binary()')
def binary(mctx, x):
    """File that appears to be binary (contains NUL bytes)."""
    # i18n: "binary" is a keyword
    getargs(x, 0, 0, _("binary takes no arguments"))
    def isbinary(fctx):
        return fctx.isbinary()
    return mctx.fpredicate(isbinary, predrepr='binary', cache=True)
176
176
@predicate('exec()')
def exec_(mctx, x):
    """File that is marked as executable."""
    # i18n: "exec" is a keyword
    getargs(x, 0, 0, _("exec takes no arguments"))
    ctx = mctx.ctx
    def isexec(f):
        return ctx.flags(f) == 'x'
    return mctx.predicate(isexec, predrepr='exec')
185
185
@predicate('symlink()')
def symlink(mctx, x):
    """File that is marked as a symlink."""
    # i18n: "symlink" is a keyword
    getargs(x, 0, 0, _("symlink takes no arguments"))
    ctx = mctx.ctx
    def issymlink(f):
        return ctx.flags(f) == 'l'
    return mctx.predicate(issymlink, predrepr='symlink')
194
194
@predicate('resolved()')
def resolved(mctx, x):
    """File that is marked resolved according to :hg:`resolve -l`."""
    # i18n: "resolved" is a keyword
    getargs(x, 0, 0, _("resolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        # merge state only applies to the working directory
        return mctx.never()
    ms = merge.mergestate.read(mctx.ctx.repo())
    def isresolved(f):
        return f in ms and ms[f] == 'r'
    return mctx.predicate(isresolved, predrepr='resolved')
206
206
@predicate('unresolved()')
def unresolved(mctx, x):
    """File that is marked unresolved according to :hg:`resolve -l`."""
    # i18n: "unresolved" is a keyword
    getargs(x, 0, 0, _("unresolved takes no arguments"))
    if mctx.ctx.rev() is not None:
        # merge state only applies to the working directory
        return mctx.never()
    ms = merge.mergestate.read(mctx.ctx.repo())
    def isunresolved(f):
        return f in ms and ms[f] == 'u'
    return mctx.predicate(isunresolved, predrepr='unresolved')
218
218
@predicate('hgignore()')
def hgignore(mctx, x):
    """File that matches the active .hgignore pattern."""
    # i18n: "hgignore" is a keyword
    getargs(x, 0, 0, _("hgignore takes no arguments"))
    # the dirstate's ignore object is itself usable as a matcher
    repo = mctx.ctx.repo()
    return repo.dirstate._ignore
226
226
@predicate('portable()')
def portable(mctx, x):
    """File that has a portable name. (This doesn't include filenames with case
    collisions.)
    """
    # i18n: "portable" is a keyword
    getargs(x, 0, 0, _("portable takes no arguments"))
    def isportable(f):
        # checkwinfilename() returns a message when the name is problematic
        return util.checkwinfilename(f) is None
    return mctx.predicate(isportable, predrepr='portable')
236
236
@predicate('grep(regex)')
def grep(mctx, x):
    """File contains the given regular expression."""
    try:
        # i18n: "grep" is a keyword
        regex = re.compile(getstring(x, _("grep requires a pattern")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s')
                               % stringutil.forcebytestr(e))
    def containsmatch(fctx):
        return regex.search(fctx.data())
    return mctx.fpredicate(containsmatch,
                           predrepr=('grep(%r)', regex.pattern), cache=True)
249
249
def _sizetomax(s):
    """Return the largest size (in bytes) still covered by the literal *s*

    E.g. '4k' covers everything up to (but not including) 5k, and '4.5k'
    up to 4.6k.
    """
    try:
        s = s.strip().lower()
        for unit, multiplier in util._sizeunits:
            if not s.endswith(unit):
                continue
            # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1
            number = s[:-len(unit)]
            step = 1.0
            if "." in number:
                # one unit in the last given decimal place
                step /= 10 ** len(number.split(".")[1])
            return int((float(number) + step) * multiplier) - 1
        # no extension, this is a precise value
        return int(s)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
265
265
def sizematcher(expr):
    """Return a function(size) -> bool from the ``size()`` expression"""
    expr = expr.strip()
    if '-' in expr: # do we have a range?
        lo, hi = expr.split('-', 1)
        lo = util.sizetoint(lo)
        hi = util.sizetoint(hi)
        return lambda x: lo <= x <= hi
    # check two-character operators before their one-character prefixes
    if expr.startswith("<="):
        limit = util.sizetoint(expr[2:])
        return lambda x: x <= limit
    if expr.startswith("<"):
        limit = util.sizetoint(expr[1:])
        return lambda x: x < limit
    if expr.startswith(">="):
        limit = util.sizetoint(expr[2:])
        return lambda x: x >= limit
    if expr.startswith(">"):
        limit = util.sizetoint(expr[1:])
        return lambda x: x > limit
    # a bare size is an inclusive bucket, e.g. '1k' is 1024..2047
    lo = util.sizetoint(expr)
    hi = _sizetomax(expr)
    return lambda x: lo <= x <= hi
290
290
@predicate('size(expression)')
def size(mctx, x):
    """File size matches the given expression. Examples:

    - size('1k') - files from 1024 to 2047 bytes
    - size('< 20k') - files less than 20480 bytes
    - size('>= .5MB') - files at least 524288 bytes
    - size('4k - 1MB') - files from 4096 bytes to 1048576 bytes
    """
    # i18n: "size" is a keyword
    expr = getstring(x, _("size requires an expression"))
    matchsize = sizematcher(expr)
    def sizep(fctx):
        return matchsize(fctx.size())
    return mctx.fpredicate(sizep, predrepr=('size(%r)', expr), cache=True)
305
305
@predicate('encoding(name)')
def encoding(mctx, x):
    """File can be successfully decoded with the given character
    encoding. May not be useful for encodings other than ASCII and
    UTF-8.
    """
    # i18n: "encoding" is a keyword
    enc = getstring(x, _("encoding requires an encoding name"))

    def encp(fctx):
        data = fctx.data()
        try:
            data.decode(pycompat.sysstr(enc))
        except LookupError:
            # the encoding name itself is invalid
            raise error.Abort(_("unknown encoding '%s'") % enc)
        except UnicodeDecodeError:
            return False
        return True

    return mctx.fpredicate(encp, predrepr=('encoding(%r)', enc), cache=True)
327
327
@predicate('eol(style)')
def eol(mctx, x):
    """File contains newlines of the given style (dos, unix, mac). Binary
    files are excluded, files with mixed line endings match multiple
    styles.
    """
    # i18n: "eol" is a keyword
    enc = getstring(x, _("eol requires a style name"))

    def eolp(fctx):
        if fctx.isbinary():
            return False
        data = fctx.data()
        if enc in ('dos', 'win'):
            return '\r\n' in data
        if enc == 'unix':
            # a newline not preceded by a carriage return
            return bool(re.search('(?<!\r)\n', data))
        if enc == 'mac':
            # a carriage return not followed by a newline
            return bool(re.search('\r(?!\n)', data))
        return False
    return mctx.fpredicate(eolp, predrepr=('eol(%r)', enc), cache=True)
350
350
@predicate('copied()')
def copied(mctx, x):
    """File that is recorded as being copied."""
    # i18n: "copied" is a keyword
    getargs(x, 0, 0, _("copied takes no arguments"))
    def copiedp(fctx):
        # a copy is recorded as a parent filectx with a different path
        parents = fctx.parents()
        return parents and parents[0].path() != fctx.path()
    return mctx.fpredicate(copiedp, predrepr='copied', cache=True)
361
361
362 @predicate('revs(revs, pattern)')
362 @predicate('revs(revs, pattern)')
363 def revs(mctx, x):
363 def revs(mctx, x):
364 """Evaluate set in the specified revisions. If the revset match multiple
364 """Evaluate set in the specified revisions. If the revset match multiple
365 revs, this will return file matching pattern in any of the revision.
365 revs, this will return file matching pattern in any of the revision.
366 """
366 """
367 # i18n: "revs" is a keyword
367 # i18n: "revs" is a keyword
368 r, x = getargs(x, 2, 2, _("revs takes two arguments"))
368 r, x = getargs(x, 2, 2, _("revs takes two arguments"))
369 # i18n: "revs" is a keyword
369 # i18n: "revs" is a keyword
370 revspec = getstring(r, _("first argument to revs must be a revision"))
370 revspec = getstring(r, _("first argument to revs must be a revision"))
371 repo = mctx.ctx.repo()
371 repo = mctx.ctx.repo()
372 revs = scmutil.revrange(repo, [revspec])
372 revs = scmutil.revrange(repo, [revspec])
373
373
374 matchers = []
374 matchers = []
375 for r in revs:
375 for r in revs:
376 ctx = repo[r]
376 ctx = repo[r]
377 matchers.append(getmatch(mctx.switch(ctx, _buildstatus(ctx, x)), x))
377 matchers.append(getmatch(mctx.switch(ctx, _buildstatus(ctx, x)), x))
378 if not matchers:
378 if not matchers:
379 return mctx.never()
379 return mctx.never()
380 if len(matchers) == 1:
380 if len(matchers) == 1:
381 return matchers[0]
381 return matchers[0]
382 return matchmod.unionmatcher(matchers)
382 return matchmod.unionmatcher(matchers)
383
383
384 @predicate('status(base, rev, pattern)')
384 @predicate('status(base, rev, pattern)')
385 def status(mctx, x):
385 def status(mctx, x):
386 """Evaluate predicate using status change between ``base`` and
386 """Evaluate predicate using status change between ``base`` and
387 ``rev``. Examples:
387 ``rev``. Examples:
388
388
389 - ``status(3, 7, added())`` - matches files added from "3" to "7"
389 - ``status(3, 7, added())`` - matches files added from "3" to "7"
390 """
390 """
391 repo = mctx.ctx.repo()
391 repo = mctx.ctx.repo()
392 # i18n: "status" is a keyword
392 # i18n: "status" is a keyword
393 b, r, x = getargs(x, 3, 3, _("status takes three arguments"))
393 b, r, x = getargs(x, 3, 3, _("status takes three arguments"))
394 # i18n: "status" is a keyword
394 # i18n: "status" is a keyword
395 baseerr = _("first argument to status must be a revision")
395 baseerr = _("first argument to status must be a revision")
396 baserevspec = getstring(b, baseerr)
396 baserevspec = getstring(b, baseerr)
397 if not baserevspec:
397 if not baserevspec:
398 raise error.ParseError(baseerr)
398 raise error.ParseError(baseerr)
399 reverr = _("second argument to status must be a revision")
399 reverr = _("second argument to status must be a revision")
400 revspec = getstring(r, reverr)
400 revspec = getstring(r, reverr)
401 if not revspec:
401 if not revspec:
402 raise error.ParseError(reverr)
402 raise error.ParseError(reverr)
403 basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
403 basectx, ctx = scmutil.revpair(repo, [baserevspec, revspec])
404 return getmatch(mctx.switch(ctx, _buildstatus(ctx, x, basectx=basectx)), x)
404 return getmatch(mctx.switch(ctx, _buildstatus(ctx, x, basectx=basectx)), x)
405
405
@predicate('subrepo([pattern])')
def subrepo(mctx, x):
    """Subrepositories whose paths match the given pattern.
    """
    # i18n: "subrepo" is a keyword
    getargs(x, 0, 1, _("subrepo takes at most one argument"))
    ctx = mctx.ctx
    sstate = ctx.substate
    if x:
        pat = getpattern(x, matchmod.allpatternkinds,
                         # i18n: "subrepo" is a keyword
                         _("subrepo requires a pattern or no arguments"))
        fast = not matchmod.patkind(pat)
        if fast:
            # plain path (no pattern kind prefix): compare literally
            def m(s):
                return (s == pat)
        else:
            m = matchmod.match(ctx.repo().root, '', [pat], ctx=ctx)
        # only paths recorded in the substate are subrepos
        return mctx.predicate(lambda f: f in sstate and m(f),
                              predrepr=('subrepo(%r)', pat))
    else:
        # no pattern: match every subrepo path
        return mctx.predicate(sstate.__contains__, predrepr='subrepo')
428
428
# dispatch table: parsed fileset tree node type -> matcher-building function
methods = {
    'string': stringmatch,
    'symbol': stringmatch,
    'kindpat': kindpatmatch,
    'and': andmatch,
    'or': ormatch,
    'minus': minusmatch,
    'list': listmatch,
    'not': notmatch,
    'func': func,
}
440
440
class matchctx(object):
    """Context in which a fileset expression is evaluated to a matcher

    Bundles the changectx being queried, an optional precomputed status
    tuple (built by _buildstatus() for status-dependent predicates), and
    an optional bad-file callback forwarded to every matcher built here.
    """
    def __init__(self, ctx, status=None, badfn=None):
        self.ctx = ctx
        self._status = status
        self._badfn = badfn

    def status(self):
        # status tuple for this context, or None when the expression does
        # not need status information
        return self._status

    def matcher(self, patterns):
        # build a pattern matcher at the current revision, propagating the
        # bad-file callback
        return self.ctx.match(patterns, badfn=self._badfn)

    def predicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(filename)"""
        if cache:
            # memoize per-filename results for the lifetime of the matcher
            predfn = util.cachefunc(predfn)
        repo = self.ctx.repo()
        return matchmod.predicatematcher(repo.root, repo.getcwd(), predfn,
                                         predrepr=predrepr, badfn=self._badfn)

    def fpredicate(self, predfn, predrepr=None, cache=False):
        """Create a matcher to select files by predfn(fctx) at the current
        revision

        Missing files are ignored.
        """
        ctx = self.ctx
        if ctx.rev() is None:
            # working directory: files can vanish or be unreadable between
            # lookup and use, so guard every step
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                try:
                    fctx.audit()
                except error.Abort:
                    return False
                try:
                    return predfn(fctx)
                except (IOError, OSError) as e:
                    # open()-ing a directory fails with EACCES on Windows
                    if e.errno in (errno.ENOENT, errno.EACCES, errno.ENOTDIR,
                                   errno.EISDIR):
                        return False
                    raise
        else:
            # committed revision: content is immutable, only guard lookup
            def fctxpredfn(f):
                try:
                    fctx = ctx[f]
                except error.LookupError:
                    return False
                return predfn(fctx)
        return self.predicate(fctxpredfn, predrepr=predrepr, cache=cache)

    def never(self):
        """Create a matcher to select nothing"""
        repo = self.ctx.repo()
        return matchmod.nevermatcher(repo.root, repo.getcwd(),
                                     badfn=self._badfn)

    def switch(self, ctx, status=None):
        # derive a new context at another revision, keeping the bad-file
        # callback (used by revs() and status())
        return matchctx(ctx, status, self._badfn)
503
503
504 # filesets using matchctx.switch()
504 # filesets using matchctx.switch()
505 _switchcallers = [
505 _switchcallers = [
506 'revs',
506 'revs',
507 'status',
507 'status',
508 ]
508 ]
509
509
510 def _intree(funcs, tree):
510 def _intree(funcs, tree):
511 if isinstance(tree, tuple):
511 if isinstance(tree, tuple):
512 if tree[0] == 'func' and tree[1][0] == 'symbol':
512 if tree[0] == 'func' and tree[1][0] == 'symbol':
513 if tree[1][1] in funcs:
513 if tree[1][1] in funcs:
514 return True
514 return True
515 if tree[1][1] in _switchcallers:
515 if tree[1][1] in _switchcallers:
516 # arguments won't be evaluated in the current context
516 # arguments won't be evaluated in the current context
517 return False
517 return False
518 for s in tree[1:]:
518 for s in tree[1:]:
519 if _intree(funcs, s):
519 if _intree(funcs, s):
520 return True
520 return True
521 return False
521 return False
522
522
def match(ctx, expr, badfn=None):
    """Create a matcher for a single fileset expression"""
    tree = filesetlang.parse(expr)
    tree = filesetlang.analyze(tree)
    # optimization pass over the analyzed tree (see filesetlang.optimize)
    tree = filesetlang.optimize(tree)
    # compute a status tuple only if the tree uses status-based predicates
    mctx = matchctx(ctx, _buildstatus(ctx, tree), badfn=badfn)
    return getmatch(mctx, tree)
529
530
def _buildstatus(ctx, tree, basectx=None):
    """Build a status tuple if the parsed tree requires one, else None

    Status is computed between ``basectx`` (defaulting to the first parent
    of ``ctx``) and ``ctx``. Unknown/ignored files are only listed when
    the tree actually uses the corresponding predicates, since they are
    expensive to compute.
    """
    # do we need status info?

    if _intree(_statuscallers, tree):
        unknown = _intree(['unknown'], tree)
        ignored = _intree(['ignored'], tree)

        if basectx is None:
            basectx = ctx.p1()
        return basectx.status(ctx, listunknown=unknown, listignored=ignored,
                              listclean=True)
    else:
        return None
543
544
def loadpredicate(ui, extname, registrarobj):
    """Load fileset predicates from specified registrarobj
    """
    for name, func in registrarobj._table.iteritems():
        symbols[name] = func
        # predicates registered with callstatus=True need a status tuple
        # built before evaluation (see _buildstatus)
        if func._callstatus:
            _statuscallers.add(name)
551
552
# load built-in predicates explicitly to setup _statuscallers
loadpredicate(None, None, predicate)

# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
@@ -1,175 +1,213
1 # filesetlang.py - parser, tokenizer and utility for file set language
1 # filesetlang.py - parser, tokenizer and utility for file set language
2 #
2 #
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from . import (
11 from . import (
12 error,
12 error,
13 parser,
13 parser,
14 pycompat,
14 pycompat,
15 )
15 )
16
16
elements = {
    # token-type: binding-strength, primary, prefix, infix, suffix
    "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None),
    ":": (15, None, None, ("kindpat", 15), None),
    "-": (5, None, ("negate", 19), ("minus", 5), None),
    "not": (10, None, ("not", 10), None, None),
    "!": (10, None, ("not", 10), None, None),
    "and": (5, None, None, ("and", 5), None),
    "&": (5, None, None, ("and", 5), None),
    "or": (4, None, None, ("or", 4), None),
    "|": (4, None, None, ("or", 4), None),
    "+": (4, None, None, ("or", 4), None),
    ",": (2, None, None, ("list", 2), None),
    ")": (0, None, None, None, None),
    "symbol": (0, "symbol", None, None, None),
    "string": (0, "string", None, None, None),
    "end": (0, None, None, None, None),
}

# words that tokenize as operators rather than as symbols
keywords = {'and', 'or', 'not'}

# name -> predicate function; populated via loadpredicate() elsewhere
symbols = {}

# extra characters allowed in unquoted symbols/patterns besides alphanumerics
globchars = ".*{}[]?/\\_"
41
41
def tokenize(program):
    """Yield (type, value, pos) tokens for a fileset expression.

    Token types are the operator characters/keywords themselves plus
    'string', 'symbol', and a trailing 'end' marker.
    """
    pos, size = 0, len(program)
    program = pycompat.bytestr(program)
    while pos < size:
        ch = program[pos]
        if ch.isspace(): # skip inter-token whitespace
            pass
        elif ch in "(),-:|&+!": # handle simple operators
            yield (ch, None, pos)
        elif (ch in '"\'' or ch == 'r' and
              program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings
            if ch == 'r':
                pos += 1
                ch = program[pos]
                decode = lambda x: x # raw string: keep escapes verbatim
            else:
                decode = parser.unescapestr
            pos += 1
            start = pos
            while pos < size: # find closing quote
                cur = program[pos]
                if cur == '\\': # skip over escaped characters
                    pos += 2
                    continue
                if cur == ch:
                    yield ('string', decode(program[start:pos]), start)
                    break
                pos += 1
            else:
                raise error.ParseError(_("unterminated string"), start)
        elif ch.isalnum() or ch in globchars or ord(ch) > 127:
            # gather up a symbol/keyword
            start = pos
            pos += 1
            while pos < size: # find end of symbol
                cur = program[pos]
                if not (cur.isalnum() or cur in globchars or ord(cur) > 127):
                    break
                pos += 1
            sym = program[start:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, start)
            else:
                yield ('symbol', sym, start)
            pos -= 1 # compensate for the unconditional advance below
        else:
            raise error.ParseError(_("syntax error"), pos)
        pos += 1
    yield ('end', None, pos)
91
91
def parse(expr):
    """Parse a fileset expression string into a raw AST."""
    tree, pos = parser.parser(elements).parse(tokenize(expr))
    if pos != len(expr):
        # parsing stopped before consuming the whole input
        raise error.ParseError(_("invalid token"), pos)
    # flatten nested binary 'list'/'or' nodes into n-ary forms
    return parser.simplifyinfixops(tree, {'list', 'or'})
98
98
def getsymbol(x):
    """Return the name carried by a 'symbol' node, or raise ParseError."""
    if not x or x[0] != 'symbol':
        raise error.ParseError(_('not a symbol'))
    return x[1]
103
103
def getstring(x, err):
    """Return the value of a 'string' or 'symbol' node; raise ParseError(err)
    otherwise."""
    if x and x[0] in ('string', 'symbol'):
        return x[1]
    raise error.ParseError(err)
108
108
def getkindpat(x, y, allkinds, err):
    """Build a 'kind:pat' pattern string from a kind node and a pattern node.

    Note: the pattern is extracted before the kind is validated so that a
    bad pattern node reports its own error first.
    """
    kind = getsymbol(x)
    pat = getstring(y, err)
    if kind in allkinds:
        return '%s:%s' % (kind, pat)
    raise error.ParseError(_("invalid pattern kind: %s") % kind)
115
115
def getpattern(x, allkinds, err):
    """Extract a match pattern, resolving an optional 'kindpat' wrapper."""
    if x and x[0] == 'kindpat':
        return getkindpat(x[1], x[2], allkinds, err)
    # a bare string/symbol node is the pattern itself
    return getstring(x, err)
120
120
def getlist(x):
    """Return the elements of a 'list' node as a Python list.

    None/empty yields []; a non-list node is wrapped in a one-element list.
    """
    if not x:
        return []
    return list(x[1:]) if x[0] == 'list' else [x]
127
127
def getargs(x, min, max, err):
    """Return the argument list of x, requiring min <= len <= max."""
    args = getlist(x)
    if not (min <= len(args) <= max):
        raise error.ParseError(err)
    return args
133
133
def _analyze(x):
    """Recursively validate one parsed node and strip pseudo operations."""
    if x is None:
        return x
    op = x[0]
    if op in {'string', 'symbol'}:
        return x
    if op == 'kindpat':
        getsymbol(x[1])  # kind must be a symbol
        return (op, x[1], _analyze(x[2]))
    if op == 'group':
        # parentheses are purely syntactic; drop the wrapper node
        return _analyze(x[1])
    if op == 'negate':
        raise error.ParseError(_("can't use negate operator in this context"))
    if op == 'not':
        return (op, _analyze(x[1]))
    if op in {'and', 'minus'}:
        return (op, _analyze(x[1]), _analyze(x[2]))
    if op in {'list', 'or'}:
        return (op,) + tuple(_analyze(y) for y in x[1:])
    if op == 'func':
        getsymbol(x[1])  # function name must be a symbol
        return (op, x[1], _analyze(x[2]))
    raise error.ProgrammingError('invalid operator %r' % op)
164
164
def analyze(x):
    """Transform a raw parsed tree into an evaluatable tree.

    The result can be fed to optimize() or getmatch(): all pseudo
    operations have been mapped to real operations or to functions defined
    in the methods or symbols tables respectively.
    """
    return _analyze(x)
173
173
def _optimize(x):
    """Return (weight, tree) for one node.

    The weight estimates evaluation cost for future reordering decisions;
    the tree itself is currently returned structurally unchanged.
    """
    if x is None:
        return 0, x
    op = x[0]
    if op in {'string', 'symbol'}:
        return 0.5, x  # bare patterns are cheap
    if op == 'kindpat':
        weight, t = _optimize(x[2])
        return weight, (op, x[1], t)
    if op == 'not':
        weight, t = _optimize(x[1])
        return weight, (op, t)
    if op in {'and', 'minus'}:
        wa, ta = _optimize(x[1])
        wb, tb = _optimize(x[2])
        # cost is dominated by the heavier operand
        return max(wa, wb), (op, ta, tb)
    if op in {'or', 'list'}:
        weights, subtrees = zip(*(_optimize(y) for y in x[1:]))
        # 'or' short-circuits per file (max); 'list' evaluates all (sum)
        combined = max(weights) if op == 'or' else sum(weights)
        return combined, (op,) + subtrees
    if op == 'func':
        fname = getsymbol(x[1])
        # predicates may declare an estimated cost via _weight (default 1)
        fweight = getattr(symbols.get(fname), '_weight', 1)
        wa, ta = _optimize(x[2])
        return fweight + wa, (op, x[1], ta)
    raise error.ProgrammingError('invalid operator %r' % op)
203
def optimize(x):
    """Reorder/rewrite an evaluatable tree for optimization.

    All pseudo operations must have been transformed beforehand (see
    analyze()). The computed top-level weight is discarded.
    """
    return _optimize(x)[1]
211
def prettyformat(tree):
    """Render a parsed tree as an indented multi-line string for debugging."""
    return parser.prettyformat(tree, ('string', 'symbol'))
@@ -1,89 +1,90
1 # minifileset.py - a simple language to select files
1 # minifileset.py - a simple language to select files
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from .i18n import _
10 from .i18n import _
11 from . import (
11 from . import (
12 error,
12 error,
13 fileset,
13 fileset,
14 filesetlang,
14 filesetlang,
15 pycompat,
15 pycompat,
16 )
16 )
17
17
def _sizep(x):
    """Build a size-matching function from a size() argument node."""
    # i18n: "size" is a keyword
    sizeexpr = filesetlang.getstring(x, _("size requires an expression"))
    return fileset.sizematcher(sizeexpr)
22
22
def _compile(tree):
    """Compile a parsed fileset tree into a ``(name, size) -> bool`` function.

    Raises ParseError for unsupported patterns/operators and
    UnknownIdentifier for unknown function names.
    """
    if not tree:
        raise error.ParseError(_("missing argument"))
    op = tree[0]
    if op in {'symbol', 'string', 'kindpat'}:
        name = filesetlang.getpattern(tree, {'path'}, _('invalid file pattern'))
        if name.startswith('**'): # file extension test, ex. "**.tar.gz"
            ext = name[2:]
            for c in pycompat.bytestr(ext):
                if c in '*{}[]?/\\':
                    raise error.ParseError(_('reserved character: %s') % c)
            return lambda n, s: n.endswith(ext)
        elif name.startswith('path:'): # directory or full path test
            p = name[5:] # prefix
            pl = len(p)
            f = lambda n, s: n.startswith(p) and (len(n) == pl
                                                  or n[pl:pl + 1] == '/')
            return f
        raise error.ParseError(_("unsupported file pattern: %s") % name,
                               hint=_('paths must be prefixed with "path:"'))
    elif op == 'or':
        funcs = [_compile(x) for x in tree[1:]]
        return lambda n, s: any(f(n, s) for f in funcs)
    elif op == 'and':
        func1 = _compile(tree[1])
        func2 = _compile(tree[2])
        return lambda n, s: func1(n, s) and func2(n, s)
    elif op == 'not':
        # BUG FIX: compile the operand once here instead of re-running
        # _compile() inside the returned lambda on every single file;
        # this also reports bad subtrees at compile time, consistently
        # with the 'and'/'or'/'minus' branches.
        func = _compile(tree[1])
        return lambda n, s: not func(n, s)
    elif op == 'func':
        symbols = {
            'all': lambda n, s: True,
            'none': lambda n, s: False,
            'size': lambda n, s: _sizep(tree[2])(s),
        }

        name = filesetlang.getsymbol(tree[1])
        if name in symbols:
            return symbols[name]

        raise error.UnknownIdentifier(name, symbols.keys())
    elif op == 'minus': # equivalent to 'x and not y'
        func1 = _compile(tree[1])
        func2 = _compile(tree[2])
        return lambda n, s: func1(n, s) and not func2(n, s)
    elif op == 'list':
        raise error.ParseError(_("can't use a list in this context"),
                               hint=_('see \'hg help "filesets.x or y"\''))
    raise error.ProgrammingError('illegal tree: %r' % (tree,))
72
72
def compile(text):
    """generate a function (path, size) -> bool from filter specification.

    "text" could contain the operators defined by the fileset language for
    common logic operations, and parenthesis for grouping.  The supported
    path tests are '**.extname' for file extension test, and
    '"path:dir/subdir"' for prefix test.  The ``size()`` predicate is
    borrowed from filesets to test file size.  The predicates ``all()`` and
    ``none()`` are also supported.

    '(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for
    example, will catch all php files whose size is greater than 10 MB, all
    files whose name ends with ".zip", and all files under "bin" in the repo
    root except for "bin/README".
    """
    # parse -> analyze -> optimize pipeline, then compile to a plain function
    tree = filesetlang.optimize(
        filesetlang.analyze(filesetlang.parse(text)))
    return _compile(tree)
@@ -1,439 +1,443
1 # registrar.py - utilities to register function for specific purpose
1 # registrar.py - utilities to register function for specific purpose
2 #
2 #
3 # Copyright FUJIWARA Katsunori <foozy@lares.dti.ne.jp> and others
3 # Copyright FUJIWARA Katsunori <foozy@lares.dti.ne.jp> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 from . import (
10 from . import (
11 configitems,
11 configitems,
12 error,
12 error,
13 pycompat,
13 pycompat,
14 util,
14 util,
15 )
15 )
16
16
# Unlike the other registered items, config options are neither functions nor
# classes. Registering an option is just a small function call.
#
# We still add the official API to the registrar module for consistency with
# the other items extensions might want to register.
configitem = configitems.getitemregister
23
23
class _funcregistrarbase(object):
    """Base of decorator to register a function for a specific purpose

    This decorator stores decorated functions into its own dict '_table'.

    The least derived class can be defined by overriding 'formatdoc',
    for example::

        class keyword(_funcregistrarbase):
            _docformat = ":%s: %s"

    This should be used as below:

        keyword = registrar.keyword()

        @keyword('bar')
        def barfunc(*args, **kwargs):
            '''Explanation of bar keyword ....
            '''
            pass

    In this case:

    - 'barfunc' is stored as 'bar' in '_table' of the 'keyword' instance
    - 'barfunc.__doc__' becomes ":bar: Explanation of bar keyword"
    """
    def __init__(self, table=None):
        # share the caller-provided table, or start a fresh private one
        self._table = {} if table is None else table

    def __call__(self, decl, *args, **kwargs):
        return lambda func: self._doregister(func, decl, *args, **kwargs)

    def _doregister(self, func, decl, *args, **kwargs):
        name = self._getname(decl)

        if name in self._table:
            raise error.ProgrammingError(
                'duplicate registration for name: "%s"' % name)

        if func.__doc__ and not util.safehasattr(func, '_origdoc'):
            # keep the raw docstring around and install the formatted one
            doc = pycompat.sysbytes(func.__doc__).strip()
            func._origdoc = doc
            func.__doc__ = pycompat.sysstr(self._formatdoc(decl, doc))

        self._table[name] = func
        self._extrasetup(name, func, *args, **kwargs)

        return func

    def _parsefuncdecl(self, decl):
        """Parse function declaration and return the name of function in it
        """
        # everything before the first '(' (or the whole string if none)
        return decl.partition('(')[0]

    def _getname(self, decl):
        """Return the name of the registered function from decl

        Derived class should override this, if it allows more
        descriptive 'decl' string than just a name.
        """
        return decl

    _docformat = None

    def _formatdoc(self, decl, doc):
        """Return formatted document of the registered function for help

        'doc' is '__doc__.strip()' of the registered function.
        """
        return self._docformat % (decl, doc)

    def _extrasetup(self, name, func):
        """Execute extra setup for registered function, if needed
        """
105
105
class command(_funcregistrarbase):
    """Decorator to register a command function to table

    This class receives a command table (a dict) as its argument; the
    created object is then used as a decorator adding commands to it.

    The first argument is the command name (as bytes). The `options`
    keyword argument is an iterable of tuples defining command arguments
    (see ``mercurial.fancyopts.fancyopts()`` for the tuple format).
    `synopsis` is a short, one line summary of how to use the command,
    shown in the help output.

    Three arguments control what repository (if any) is found and passed
    to the decorated function:

    - `norepo`: the command does not require a local repository; no
      repository will be passed (default False).
    - `optionalrepo`: the command optionally requires a repository; None
      is passed when none can be found.
    - `inferrepo`: try to find a repository from the command line
      arguments (see ``findrepo()``); if found it is used and passed.

    The `intents` argument defines a set of intended actions or
    capabilities the command is taking, which can affect the construction
    of the repository object passed to the command (for example, a
    read-only command could receive a repository without mutation
    methods, or be prevented from running if an intent cannot be
    fulfilled).

    The following intents are defined:

    readonly
        The command is read-only

    The signature of the decorated function looks like this:
        def cmd(ui[, repo] [, <args>] [, <options>])

    `repo` is required if `norepo` is False. `<args>` are positional
    (or `*args`) non-option arguments, `<options>` keyword (or
    `**options`) option arguments from the command line.

    See the WritingExtensions and MercurialApi documentation for more
    exhaustive descriptions and examples.
    """

    def _doregister(self, func, name, options=(), synopsis=None,
                    norepo=False, optionalrepo=False, inferrepo=False,
                    intents=None):
        # record repo-handling and intent attributes on the function itself
        func.norepo = norepo
        func.optionalrepo = optionalrepo
        func.inferrepo = inferrepo
        func.intents = intents or set()
        # table entry is (func, options[, synopsis]) - synopsis is optional
        entry = (func, list(options))
        if synopsis:
            entry += (synopsis,)
        self._table[name] = entry
        return func
180
180
# intent value accepted by the `intents` argument of @command
INTENT_READONLY = b'readonly'
182
182
class revsetpredicate(_funcregistrarbase):
    """Decorator to register revset predicate

    Usage::

        revsetpredicate = registrar.revsetpredicate()

        @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
        def mypredicatefunc(repo, subset, x):
            '''Explanation of this revset predicate ....
            '''
            pass

    The first string argument is used also in online help.

    Optional argument 'safe' indicates whether a predicate is safe for
    DoS attack (False by default).

    Optional argument 'takeorder' indicates whether a predicate function
    takes ordering policy as the last argument.

    Optional argument 'weight' indicates the estimated run-time cost, useful
    for static optimization, default is 1. Higher weight means more
    expensive. Usually, revsets that are fast and return only one revision
    have a weight of 0.5 (ex. a symbol); revsets with O(changelog)
    complexity and reading only the changelog have weight 10 (ex. author);
    revsets reading manifest deltas have weight 30 (ex. adds); revsets
    reading manifest contents have weight 100 (ex. contains). Those values
    are flexible: if a revset has the same big-O time complexity as
    'contains' but a smaller constant, it might have a weight of 90.

    A 'revsetpredicate' instance can be used to decorate multiple
    functions.

    Decorated functions are registered automatically at loading extension,
    if an instance named as 'revsetpredicate' is used for decorating in
    extension. Otherwise, explicit 'revset.loadpredicate()' is needed.
    """
    _getname = _funcregistrarbase._parsefuncdecl
    _docformat = "``%s``\n    %s"

    def _extrasetup(self, name, func, safe=False, takeorder=False, weight=1):
        # stash predicate metadata on the function for the revset machinery
        func._safe = safe
        func._takeorder = takeorder
        func._weight = weight
230
230
231 class filesetpredicate(_funcregistrarbase):
231 class filesetpredicate(_funcregistrarbase):
232 """Decorator to register fileset predicate
232 """Decorator to register fileset predicate
233
233
234 Usage::
234 Usage::
235
235
236 filesetpredicate = registrar.filesetpredicate()
236 filesetpredicate = registrar.filesetpredicate()
237
237
238 @filesetpredicate('mypredicate()')
238 @filesetpredicate('mypredicate()')
239 def mypredicatefunc(mctx, x):
239 def mypredicatefunc(mctx, x):
240 '''Explanation of this fileset predicate ....
240 '''Explanation of this fileset predicate ....
241 '''
241 '''
242 pass
242 pass
243
243
244 The first string argument is used also in online help.
244 The first string argument is used also in online help.
245
245
246 Optional argument 'callstatus' indicates whether a predicate
246 Optional argument 'callstatus' indicates whether a predicate
247 implies 'matchctx.status()' at runtime or not (False, by
247 implies 'matchctx.status()' at runtime or not (False, by
248 default).
248 default).
249
249
250 Optional argument 'weight' indicates the estimated run-time cost, useful
251 for static optimization, default is 1. Higher weight means more expensive.
252
250 'filesetpredicate' instance in example above can be used to
253 'filesetpredicate' instance in example above can be used to
251 decorate multiple functions.
254 decorate multiple functions.
252
255
253 Decorated functions are registered automatically at loading
256 Decorated functions are registered automatically at loading
254 extension, if an instance named as 'filesetpredicate' is used for
257 extension, if an instance named as 'filesetpredicate' is used for
255 decorating in extension.
258 decorating in extension.
256
259
257 Otherwise, explicit 'fileset.loadpredicate()' is needed.
260 Otherwise, explicit 'fileset.loadpredicate()' is needed.
258 """
261 """
259 _getname = _funcregistrarbase._parsefuncdecl
262 _getname = _funcregistrarbase._parsefuncdecl
260 _docformat = "``%s``\n %s"
263 _docformat = "``%s``\n %s"
261
264
262 def _extrasetup(self, name, func, callstatus=False):
265 def _extrasetup(self, name, func, callstatus=False, weight=1):
263 func._callstatus = callstatus
266 func._callstatus = callstatus
267 func._weight = weight
264
268
265 class _templateregistrarbase(_funcregistrarbase):
269 class _templateregistrarbase(_funcregistrarbase):
266 """Base of decorator to register functions as template specific one
270 """Base of decorator to register functions as template specific one
267 """
271 """
268 _docformat = ":%s: %s"
272 _docformat = ":%s: %s"
269
273
270 class templatekeyword(_templateregistrarbase):
274 class templatekeyword(_templateregistrarbase):
271 """Decorator to register template keyword
275 """Decorator to register template keyword
272
276
273 Usage::
277 Usage::
274
278
275 templatekeyword = registrar.templatekeyword()
279 templatekeyword = registrar.templatekeyword()
276
280
277 # new API (since Mercurial 4.6)
281 # new API (since Mercurial 4.6)
278 @templatekeyword('mykeyword', requires={'repo', 'ctx'})
282 @templatekeyword('mykeyword', requires={'repo', 'ctx'})
279 def mykeywordfunc(context, mapping):
283 def mykeywordfunc(context, mapping):
280 '''Explanation of this template keyword ....
284 '''Explanation of this template keyword ....
281 '''
285 '''
282 pass
286 pass
283
287
284 # old API
288 # old API
285 @templatekeyword('mykeyword')
289 @templatekeyword('mykeyword')
286 def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
290 def mykeywordfunc(repo, ctx, templ, cache, revcache, **args):
287 '''Explanation of this template keyword ....
291 '''Explanation of this template keyword ....
288 '''
292 '''
289 pass
293 pass
290
294
291 The first string argument is used also in online help.
295 The first string argument is used also in online help.
292
296
293 Optional argument 'requires' should be a collection of resource names
297 Optional argument 'requires' should be a collection of resource names
294 which the template keyword depends on. This also serves as a flag to
298 which the template keyword depends on. This also serves as a flag to
295 switch to the new API. If 'requires' is unspecified, all template
299 switch to the new API. If 'requires' is unspecified, all template
296 keywords and resources are expanded to the function arguments.
300 keywords and resources are expanded to the function arguments.
297
301
298 'templatekeyword' instance in example above can be used to
302 'templatekeyword' instance in example above can be used to
299 decorate multiple functions.
303 decorate multiple functions.
300
304
301 Decorated functions are registered automatically at loading
305 Decorated functions are registered automatically at loading
302 extension, if an instance named as 'templatekeyword' is used for
306 extension, if an instance named as 'templatekeyword' is used for
303 decorating in extension.
307 decorating in extension.
304
308
305 Otherwise, explicit 'templatekw.loadkeyword()' is needed.
309 Otherwise, explicit 'templatekw.loadkeyword()' is needed.
306 """
310 """
307
311
308 def _extrasetup(self, name, func, requires=None):
312 def _extrasetup(self, name, func, requires=None):
309 func._requires = requires
313 func._requires = requires
310
314
311 class templatefilter(_templateregistrarbase):
315 class templatefilter(_templateregistrarbase):
312 """Decorator to register template filer
316 """Decorator to register template filer
313
317
314 Usage::
318 Usage::
315
319
316 templatefilter = registrar.templatefilter()
320 templatefilter = registrar.templatefilter()
317
321
318 @templatefilter('myfilter', intype=bytes)
322 @templatefilter('myfilter', intype=bytes)
319 def myfilterfunc(text):
323 def myfilterfunc(text):
320 '''Explanation of this template filter ....
324 '''Explanation of this template filter ....
321 '''
325 '''
322 pass
326 pass
323
327
324 The first string argument is used also in online help.
328 The first string argument is used also in online help.
325
329
326 Optional argument 'intype' defines the type of the input argument,
330 Optional argument 'intype' defines the type of the input argument,
327 which should be (bytes, int, templateutil.date, or None for any.)
331 which should be (bytes, int, templateutil.date, or None for any.)
328
332
329 'templatefilter' instance in example above can be used to
333 'templatefilter' instance in example above can be used to
330 decorate multiple functions.
334 decorate multiple functions.
331
335
332 Decorated functions are registered automatically at loading
336 Decorated functions are registered automatically at loading
333 extension, if an instance named as 'templatefilter' is used for
337 extension, if an instance named as 'templatefilter' is used for
334 decorating in extension.
338 decorating in extension.
335
339
336 Otherwise, explicit 'templatefilters.loadkeyword()' is needed.
340 Otherwise, explicit 'templatefilters.loadkeyword()' is needed.
337 """
341 """
338
342
339 def _extrasetup(self, name, func, intype=None):
343 def _extrasetup(self, name, func, intype=None):
340 func._intype = intype
344 func._intype = intype
341
345
342 class templatefunc(_templateregistrarbase):
346 class templatefunc(_templateregistrarbase):
343 """Decorator to register template function
347 """Decorator to register template function
344
348
345 Usage::
349 Usage::
346
350
347 templatefunc = registrar.templatefunc()
351 templatefunc = registrar.templatefunc()
348
352
349 @templatefunc('myfunc(arg1, arg2[, arg3])', argspec='arg1 arg2 arg3',
353 @templatefunc('myfunc(arg1, arg2[, arg3])', argspec='arg1 arg2 arg3',
350 requires={'ctx'})
354 requires={'ctx'})
351 def myfuncfunc(context, mapping, args):
355 def myfuncfunc(context, mapping, args):
352 '''Explanation of this template function ....
356 '''Explanation of this template function ....
353 '''
357 '''
354 pass
358 pass
355
359
356 The first string argument is used also in online help.
360 The first string argument is used also in online help.
357
361
358 If optional 'argspec' is defined, the function will receive 'args' as
362 If optional 'argspec' is defined, the function will receive 'args' as
359 a dict of named arguments. Otherwise 'args' is a list of positional
363 a dict of named arguments. Otherwise 'args' is a list of positional
360 arguments.
364 arguments.
361
365
362 Optional argument 'requires' should be a collection of resource names
366 Optional argument 'requires' should be a collection of resource names
363 which the template function depends on.
367 which the template function depends on.
364
368
365 'templatefunc' instance in example above can be used to
369 'templatefunc' instance in example above can be used to
366 decorate multiple functions.
370 decorate multiple functions.
367
371
368 Decorated functions are registered automatically at loading
372 Decorated functions are registered automatically at loading
369 extension, if an instance named as 'templatefunc' is used for
373 extension, if an instance named as 'templatefunc' is used for
370 decorating in extension.
374 decorating in extension.
371
375
372 Otherwise, explicit 'templatefuncs.loadfunction()' is needed.
376 Otherwise, explicit 'templatefuncs.loadfunction()' is needed.
373 """
377 """
374 _getname = _funcregistrarbase._parsefuncdecl
378 _getname = _funcregistrarbase._parsefuncdecl
375
379
376 def _extrasetup(self, name, func, argspec=None, requires=()):
380 def _extrasetup(self, name, func, argspec=None, requires=()):
377 func._argspec = argspec
381 func._argspec = argspec
378 func._requires = requires
382 func._requires = requires
379
383
380 class internalmerge(_funcregistrarbase):
384 class internalmerge(_funcregistrarbase):
381 """Decorator to register in-process merge tool
385 """Decorator to register in-process merge tool
382
386
383 Usage::
387 Usage::
384
388
385 internalmerge = registrar.internalmerge()
389 internalmerge = registrar.internalmerge()
386
390
387 @internalmerge('mymerge', internalmerge.mergeonly,
391 @internalmerge('mymerge', internalmerge.mergeonly,
388 onfailure=None, precheck=None):
392 onfailure=None, precheck=None):
389 def mymergefunc(repo, mynode, orig, fcd, fco, fca,
393 def mymergefunc(repo, mynode, orig, fcd, fco, fca,
390 toolconf, files, labels=None):
394 toolconf, files, labels=None):
391 '''Explanation of this internal merge tool ....
395 '''Explanation of this internal merge tool ....
392 '''
396 '''
393 return 1, False # means "conflicted", "no deletion needed"
397 return 1, False # means "conflicted", "no deletion needed"
394
398
395 The first string argument is used to compose actual merge tool name,
399 The first string argument is used to compose actual merge tool name,
396 ":name" and "internal:name" (the latter is historical one).
400 ":name" and "internal:name" (the latter is historical one).
397
401
398 The second argument is one of merge types below:
402 The second argument is one of merge types below:
399
403
400 ========== ======== ======== =========
404 ========== ======== ======== =========
401 merge type precheck premerge fullmerge
405 merge type precheck premerge fullmerge
402 ========== ======== ======== =========
406 ========== ======== ======== =========
403 nomerge x x x
407 nomerge x x x
404 mergeonly o x o
408 mergeonly o x o
405 fullmerge o o o
409 fullmerge o o o
406 ========== ======== ======== =========
410 ========== ======== ======== =========
407
411
408 Optional argument 'onfailure' is the format of warning message
412 Optional argument 'onfailure' is the format of warning message
409 to be used at failure of merging (target filename is specified
413 to be used at failure of merging (target filename is specified
410 at formatting). Or, None or so, if warning message should be
414 at formatting). Or, None or so, if warning message should be
411 suppressed.
415 suppressed.
412
416
413 Optional argument 'precheck' is the function to be used
417 Optional argument 'precheck' is the function to be used
414 before actual invocation of internal merge tool itself.
418 before actual invocation of internal merge tool itself.
415 It takes as same arguments as internal merge tool does, other than
419 It takes as same arguments as internal merge tool does, other than
416 'files' and 'labels'. If it returns false value, merging is aborted
420 'files' and 'labels'. If it returns false value, merging is aborted
417 immediately (and file is marked as "unresolved").
421 immediately (and file is marked as "unresolved").
418
422
419 'internalmerge' instance in example above can be used to
423 'internalmerge' instance in example above can be used to
420 decorate multiple functions.
424 decorate multiple functions.
421
425
422 Decorated functions are registered automatically at loading
426 Decorated functions are registered automatically at loading
423 extension, if an instance named as 'internalmerge' is used for
427 extension, if an instance named as 'internalmerge' is used for
424 decorating in extension.
428 decorating in extension.
425
429
426 Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
430 Otherwise, explicit 'filemerge.loadinternalmerge()' is needed.
427 """
431 """
428 _docformat = "``:%s``\n %s"
432 _docformat = "``:%s``\n %s"
429
433
430 # merge type definitions:
434 # merge type definitions:
431 nomerge = None
435 nomerge = None
432 mergeonly = 'mergeonly' # just the full merge, no premerge
436 mergeonly = 'mergeonly' # just the full merge, no premerge
433 fullmerge = 'fullmerge' # both premerge and merge
437 fullmerge = 'fullmerge' # both premerge and merge
434
438
435 def _extrasetup(self, name, func, mergetype,
439 def _extrasetup(self, name, func, mergetype,
436 onfailure=None, precheck=None):
440 onfailure=None, precheck=None):
437 func.mergetype = mergetype
441 func.mergetype = mergetype
438 func.onfailure = onfailure
442 func.onfailure = onfailure
439 func.precheck = precheck
443 func.precheck = precheck
@@ -1,754 +1,765
1 $ fileset() {
1 $ fileset() {
2 > hg debugfileset --all-files "$@"
2 > hg debugfileset --all-files "$@"
3 > }
3 > }
4
4
5 $ hg init repo
5 $ hg init repo
6 $ cd repo
6 $ cd repo
7 $ echo a > a1
7 $ echo a > a1
8 $ echo a > a2
8 $ echo a > a2
9 $ echo b > b1
9 $ echo b > b1
10 $ echo b > b2
10 $ echo b > b2
11 $ hg ci -Am addfiles
11 $ hg ci -Am addfiles
12 adding a1
12 adding a1
13 adding a2
13 adding a2
14 adding b1
14 adding b1
15 adding b2
15 adding b2
16
16
17 Test operators and basic patterns
17 Test operators and basic patterns
18
18
19 $ fileset -v a1
19 $ fileset -v a1
20 (symbol 'a1')
20 (symbol 'a1')
21 * matcher:
21 * matcher:
22 <patternmatcher patterns='(?:a1$)'>
22 <patternmatcher patterns='(?:a1$)'>
23 a1
23 a1
24 $ fileset -v 'a*'
24 $ fileset -v 'a*'
25 (symbol 'a*')
25 (symbol 'a*')
26 * matcher:
26 * matcher:
27 <patternmatcher patterns='(?:a[^/]*$)'>
27 <patternmatcher patterns='(?:a[^/]*$)'>
28 a1
28 a1
29 a2
29 a2
30 $ fileset -v '"re:a\d"'
30 $ fileset -v '"re:a\d"'
31 (string 're:a\\d')
31 (string 're:a\\d')
32 * matcher:
32 * matcher:
33 <patternmatcher patterns='(?:a\\d)'>
33 <patternmatcher patterns='(?:a\\d)'>
34 a1
34 a1
35 a2
35 a2
36 $ fileset -v '!re:"a\d"'
36 $ fileset -v '!re:"a\d"'
37 (not
37 (not
38 (kindpat
38 (kindpat
39 (symbol 're')
39 (symbol 're')
40 (string 'a\\d')))
40 (string 'a\\d')))
41 * matcher:
41 * matcher:
42 <predicatenmatcher
42 <predicatenmatcher
43 pred=<not
43 pred=<not
44 <patternmatcher patterns='(?:a\\d)'>>>
44 <patternmatcher patterns='(?:a\\d)'>>>
45 b1
45 b1
46 b2
46 b2
47 $ fileset -v 'path:a1 or glob:b?'
47 $ fileset -v 'path:a1 or glob:b?'
48 (or
48 (or
49 (kindpat
49 (kindpat
50 (symbol 'path')
50 (symbol 'path')
51 (symbol 'a1'))
51 (symbol 'a1'))
52 (kindpat
52 (kindpat
53 (symbol 'glob')
53 (symbol 'glob')
54 (symbol 'b?')))
54 (symbol 'b?')))
55 * matcher:
55 * matcher:
56 <unionmatcher matchers=[
56 <unionmatcher matchers=[
57 <patternmatcher patterns='(?:a1(?:/|$))'>,
57 <patternmatcher patterns='(?:a1(?:/|$))'>,
58 <patternmatcher patterns='(?:b.$)'>]>
58 <patternmatcher patterns='(?:b.$)'>]>
59 a1
59 a1
60 b1
60 b1
61 b2
61 b2
62 $ fileset -v --no-show-matcher 'a1 or a2'
62 $ fileset -v --no-show-matcher 'a1 or a2'
63 (or
63 (or
64 (symbol 'a1')
64 (symbol 'a1')
65 (symbol 'a2'))
65 (symbol 'a2'))
66 a1
66 a1
67 a2
67 a2
68 $ fileset 'a1 | a2'
68 $ fileset 'a1 | a2'
69 a1
69 a1
70 a2
70 a2
71 $ fileset 'a* and "*1"'
71 $ fileset 'a* and "*1"'
72 a1
72 a1
73 $ fileset 'a* & "*1"'
73 $ fileset 'a* & "*1"'
74 a1
74 a1
75 $ fileset 'not (r"a*")'
75 $ fileset 'not (r"a*")'
76 b1
76 b1
77 b2
77 b2
78 $ fileset '! ("a*")'
78 $ fileset '! ("a*")'
79 b1
79 b1
80 b2
80 b2
81 $ fileset 'a* - a1'
81 $ fileset 'a* - a1'
82 a2
82 a2
83 $ fileset 'a_b'
83 $ fileset 'a_b'
84 $ fileset '"\xy"'
84 $ fileset '"\xy"'
85 hg: parse error: invalid \x escape* (glob)
85 hg: parse error: invalid \x escape* (glob)
86 [255]
86 [255]
87
87
88 Test invalid syntax
88 Test invalid syntax
89
89
90 $ fileset -v '"added"()'
90 $ fileset -v '"added"()'
91 (func
91 (func
92 (string 'added')
92 (string 'added')
93 None)
93 None)
94 hg: parse error: not a symbol
94 hg: parse error: not a symbol
95 [255]
95 [255]
96 $ fileset -v '()()'
96 $ fileset -v '()()'
97 (func
97 (func
98 (group
98 (group
99 None)
99 None)
100 None)
100 None)
101 hg: parse error: not a symbol
101 hg: parse error: not a symbol
102 [255]
102 [255]
103 $ fileset -v -- '-x'
103 $ fileset -v -- '-x'
104 (negate
104 (negate
105 (symbol 'x'))
105 (symbol 'x'))
106 hg: parse error: can't use negate operator in this context
106 hg: parse error: can't use negate operator in this context
107 [255]
107 [255]
108 $ fileset -v -- '-()'
108 $ fileset -v -- '-()'
109 (negate
109 (negate
110 (group
110 (group
111 None))
111 None))
112 hg: parse error: can't use negate operator in this context
112 hg: parse error: can't use negate operator in this context
113 [255]
113 [255]
114 $ fileset -p parsed 'a, b, c'
114 $ fileset -p parsed 'a, b, c'
115 * parsed:
115 * parsed:
116 (list
116 (list
117 (symbol 'a')
117 (symbol 'a')
118 (symbol 'b')
118 (symbol 'b')
119 (symbol 'c'))
119 (symbol 'c'))
120 hg: parse error: can't use a list in this context
120 hg: parse error: can't use a list in this context
121 (see 'hg help "filesets.x or y"')
121 (see 'hg help "filesets.x or y"')
122 [255]
122 [255]
123
123
124 $ fileset '"path":.'
124 $ fileset '"path":.'
125 hg: parse error: not a symbol
125 hg: parse error: not a symbol
126 [255]
126 [255]
127 $ fileset 'path:foo bar'
127 $ fileset 'path:foo bar'
128 hg: parse error at 9: invalid token
128 hg: parse error at 9: invalid token
129 [255]
129 [255]
130 $ fileset 'foo:bar:baz'
130 $ fileset 'foo:bar:baz'
131 hg: parse error: not a symbol
131 hg: parse error: not a symbol
132 [255]
132 [255]
133 $ fileset 'foo:bar()'
133 $ fileset 'foo:bar()'
134 hg: parse error: pattern must be a string
134 hg: parse error: pattern must be a string
135 [255]
135 [255]
136 $ fileset 'foo:bar'
136 $ fileset 'foo:bar'
137 hg: parse error: invalid pattern kind: foo
137 hg: parse error: invalid pattern kind: foo
138 [255]
138 [255]
139
139
140 Show parsed tree at stages:
140 Show parsed tree at stages:
141
141
142 $ fileset -p unknown a
142 $ fileset -p unknown a
143 abort: invalid stage name: unknown
143 abort: invalid stage name: unknown
144 [255]
144 [255]
145
145
146 $ fileset -p parsed 'path:a1 or glob:b?'
146 $ fileset -p parsed 'path:a1 or glob:b?'
147 * parsed:
147 * parsed:
148 (or
148 (or
149 (kindpat
149 (kindpat
150 (symbol 'path')
150 (symbol 'path')
151 (symbol 'a1'))
151 (symbol 'a1'))
152 (kindpat
152 (kindpat
153 (symbol 'glob')
153 (symbol 'glob')
154 (symbol 'b?')))
154 (symbol 'b?')))
155 a1
155 a1
156 b1
156 b1
157 b2
157 b2
158
158
159 $ fileset -p all -s 'a1 or a2 or (grep("b") & clean())'
159 $ fileset -p all -s 'a1 or a2 or (grep("b") & clean())'
160 * parsed:
160 * parsed:
161 (or
161 (or
162 (symbol 'a1')
162 (symbol 'a1')
163 (symbol 'a2')
163 (symbol 'a2')
164 (group
164 (group
165 (and
165 (and
166 (func
166 (func
167 (symbol 'grep')
167 (symbol 'grep')
168 (string 'b'))
168 (string 'b'))
169 (func
169 (func
170 (symbol 'clean')
170 (symbol 'clean')
171 None))))
171 None))))
172 * analyzed:
172 * analyzed:
173 (or
173 (or
174 (symbol 'a1')
174 (symbol 'a1')
175 (symbol 'a2')
175 (symbol 'a2')
176 (and
176 (and
177 (func
177 (func
178 (symbol 'grep')
178 (symbol 'grep')
179 (string 'b'))
179 (string 'b'))
180 (func
180 (func
181 (symbol 'clean')
181 (symbol 'clean')
182 None)))
182 None)))
183 * optimized:
184 (or
185 (symbol 'a1')
186 (symbol 'a2')
187 (and
188 (func
189 (symbol 'grep')
190 (string 'b'))
191 (func
192 (symbol 'clean')
193 None)))
183 * matcher:
194 * matcher:
184 <unionmatcher matchers=[
195 <unionmatcher matchers=[
185 <patternmatcher patterns='(?:a1$)'>,
196 <patternmatcher patterns='(?:a1$)'>,
186 <patternmatcher patterns='(?:a2$)'>,
197 <patternmatcher patterns='(?:a2$)'>,
187 <intersectionmatcher
198 <intersectionmatcher
188 m1=<predicatenmatcher pred=grep('b')>,
199 m1=<predicatenmatcher pred=grep('b')>,
189 m2=<predicatenmatcher pred=clean>>]>
200 m2=<predicatenmatcher pred=clean>>]>
190 a1
201 a1
191 a2
202 a2
192 b1
203 b1
193 b2
204 b2
194
205
195 Test files status
206 Test files status
196
207
197 $ rm a1
208 $ rm a1
198 $ hg rm a2
209 $ hg rm a2
199 $ echo b >> b2
210 $ echo b >> b2
200 $ hg cp b1 c1
211 $ hg cp b1 c1
201 $ echo c > c2
212 $ echo c > c2
202 $ echo c > c3
213 $ echo c > c3
203 $ cat > .hgignore <<EOF
214 $ cat > .hgignore <<EOF
204 > \.hgignore
215 > \.hgignore
205 > 2$
216 > 2$
206 > EOF
217 > EOF
207 $ fileset 'modified()'
218 $ fileset 'modified()'
208 b2
219 b2
209 $ fileset 'added()'
220 $ fileset 'added()'
210 c1
221 c1
211 $ fileset 'removed()'
222 $ fileset 'removed()'
212 a2
223 a2
213 $ fileset 'deleted()'
224 $ fileset 'deleted()'
214 a1
225 a1
215 $ fileset 'missing()'
226 $ fileset 'missing()'
216 a1
227 a1
217 $ fileset 'unknown()'
228 $ fileset 'unknown()'
218 c3
229 c3
219 $ fileset 'ignored()'
230 $ fileset 'ignored()'
220 .hgignore
231 .hgignore
221 c2
232 c2
222 $ fileset 'hgignore()'
233 $ fileset 'hgignore()'
223 .hgignore
234 .hgignore
224 a2
235 a2
225 b2
236 b2
226 c2
237 c2
227 $ fileset 'clean()'
238 $ fileset 'clean()'
228 b1
239 b1
229 $ fileset 'copied()'
240 $ fileset 'copied()'
230 c1
241 c1
231
242
232 Test files status in different revisions
243 Test files status in different revisions
233
244
234 $ hg status -m
245 $ hg status -m
235 M b2
246 M b2
236 $ fileset -r0 'revs("wdir()", modified())' --traceback
247 $ fileset -r0 'revs("wdir()", modified())' --traceback
237 b2
248 b2
238 $ hg status -a
249 $ hg status -a
239 A c1
250 A c1
240 $ fileset -r0 'revs("wdir()", added())'
251 $ fileset -r0 'revs("wdir()", added())'
241 c1
252 c1
242 $ hg status --change 0 -a
253 $ hg status --change 0 -a
243 A a1
254 A a1
244 A a2
255 A a2
245 A b1
256 A b1
246 A b2
257 A b2
247 $ hg status -mru
258 $ hg status -mru
248 M b2
259 M b2
249 R a2
260 R a2
250 ? c3
261 ? c3
251 $ fileset -r0 'added() and revs("wdir()", modified() or removed() or unknown())'
262 $ fileset -r0 'added() and revs("wdir()", modified() or removed() or unknown())'
252 a2
263 a2
253 b2
264 b2
254 $ fileset -r0 'added() or revs("wdir()", added())'
265 $ fileset -r0 'added() or revs("wdir()", added())'
255 a1
266 a1
256 a2
267 a2
257 b1
268 b1
258 b2
269 b2
259 c1
270 c1
260
271
261 Test files properties
272 Test files properties
262
273
263 >>> open('bin', 'wb').write(b'\0a') and None
274 >>> open('bin', 'wb').write(b'\0a') and None
264 $ fileset 'binary()'
275 $ fileset 'binary()'
265 bin
276 bin
266 $ fileset 'binary() and unknown()'
277 $ fileset 'binary() and unknown()'
267 bin
278 bin
268 $ echo '^bin$' >> .hgignore
279 $ echo '^bin$' >> .hgignore
269 $ fileset 'binary() and ignored()'
280 $ fileset 'binary() and ignored()'
270 bin
281 bin
271 $ hg add bin
282 $ hg add bin
272 $ fileset 'binary()'
283 $ fileset 'binary()'
273 bin
284 bin
274
285
275 $ fileset 'grep("b{1}")'
286 $ fileset 'grep("b{1}")'
276 .hgignore
287 .hgignore
277 b1
288 b1
278 b2
289 b2
279 c1
290 c1
280 $ fileset 'grep("missingparens(")'
291 $ fileset 'grep("missingparens(")'
281 hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
292 hg: parse error: invalid match pattern: (unbalanced parenthesis|missing \)).* (re)
282 [255]
293 [255]
283
294
284 #if execbit
295 #if execbit
285 $ chmod +x b2
296 $ chmod +x b2
286 $ fileset 'exec()'
297 $ fileset 'exec()'
287 b2
298 b2
288 #endif
299 #endif
289
300
290 #if symlink
301 #if symlink
291 $ ln -s b2 b2link
302 $ ln -s b2 b2link
292 $ fileset 'symlink() and unknown()'
303 $ fileset 'symlink() and unknown()'
293 b2link
304 b2link
294 $ hg add b2link
305 $ hg add b2link
295 #endif
306 #endif
296
307
297 #if no-windows
308 #if no-windows
298 $ echo foo > con.xml
309 $ echo foo > con.xml
299 $ fileset 'not portable()'
310 $ fileset 'not portable()'
300 con.xml
311 con.xml
301 $ hg --config ui.portablefilenames=ignore add con.xml
312 $ hg --config ui.portablefilenames=ignore add con.xml
302 #endif
313 #endif
303
314
304 >>> open('1k', 'wb').write(b' '*1024) and None
315 >>> open('1k', 'wb').write(b' '*1024) and None
305 >>> open('2k', 'wb').write(b' '*2048) and None
316 >>> open('2k', 'wb').write(b' '*2048) and None
306 $ hg add 1k 2k
317 $ hg add 1k 2k
307 $ fileset 'size("bar")'
318 $ fileset 'size("bar")'
308 hg: parse error: couldn't parse size: bar
319 hg: parse error: couldn't parse size: bar
309 [255]
320 [255]
310 $ fileset '(1k, 2k)'
321 $ fileset '(1k, 2k)'
311 hg: parse error: can't use a list in this context
322 hg: parse error: can't use a list in this context
312 (see 'hg help "filesets.x or y"')
323 (see 'hg help "filesets.x or y"')
313 [255]
324 [255]
314 $ fileset 'size(1k)'
325 $ fileset 'size(1k)'
315 1k
326 1k
316 $ fileset '(1k or 2k) and size("< 2k")'
327 $ fileset '(1k or 2k) and size("< 2k")'
317 1k
328 1k
318 $ fileset '(1k or 2k) and size("<=2k")'
329 $ fileset '(1k or 2k) and size("<=2k")'
319 1k
330 1k
320 2k
331 2k
321 $ fileset '(1k or 2k) and size("> 1k")'
332 $ fileset '(1k or 2k) and size("> 1k")'
322 2k
333 2k
323 $ fileset '(1k or 2k) and size(">=1K")'
334 $ fileset '(1k or 2k) and size(">=1K")'
324 1k
335 1k
325 2k
336 2k
326 $ fileset '(1k or 2k) and size(".5KB - 1.5kB")'
337 $ fileset '(1k or 2k) and size(".5KB - 1.5kB")'
327 1k
338 1k
328 $ fileset 'size("1M")'
339 $ fileset 'size("1M")'
329 $ fileset 'size("1 GB")'
340 $ fileset 'size("1 GB")'
330
341
331 Test merge states
342 Test merge states
332
343
333 $ hg ci -m manychanges
344 $ hg ci -m manychanges
334 $ hg file -r . 'set:copied() & modified()'
345 $ hg file -r . 'set:copied() & modified()'
335 [1]
346 [1]
336 $ hg up -C 0
347 $ hg up -C 0
337 * files updated, 0 files merged, * files removed, 0 files unresolved (glob)
348 * files updated, 0 files merged, * files removed, 0 files unresolved (glob)
338 $ echo c >> b2
349 $ echo c >> b2
339 $ hg ci -m diverging b2
350 $ hg ci -m diverging b2
340 created new head
351 created new head
341 $ fileset 'resolved()'
352 $ fileset 'resolved()'
342 $ fileset 'unresolved()'
353 $ fileset 'unresolved()'
343 $ hg merge
354 $ hg merge
344 merging b2
355 merging b2
345 warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
356 warning: conflicts while merging b2! (edit, then use 'hg resolve --mark')
346 * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
357 * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob)
347 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
358 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
348 [1]
359 [1]
349 $ fileset 'resolved()'
360 $ fileset 'resolved()'
350 $ fileset 'unresolved()'
361 $ fileset 'unresolved()'
351 b2
362 b2
352 $ echo e > b2
363 $ echo e > b2
353 $ hg resolve -m b2
364 $ hg resolve -m b2
354 (no more unresolved files)
365 (no more unresolved files)
355 $ fileset 'resolved()'
366 $ fileset 'resolved()'
356 b2
367 b2
357 $ fileset 'unresolved()'
368 $ fileset 'unresolved()'
358 $ hg ci -m merge
369 $ hg ci -m merge
359
370
360 Test subrepo predicate
371 Test subrepo predicate
361
372
362 $ hg init sub
373 $ hg init sub
363 $ echo a > sub/suba
374 $ echo a > sub/suba
364 $ hg -R sub add sub/suba
375 $ hg -R sub add sub/suba
365 $ hg -R sub ci -m sub
376 $ hg -R sub ci -m sub
366 $ echo 'sub = sub' > .hgsub
377 $ echo 'sub = sub' > .hgsub
367 $ hg init sub2
378 $ hg init sub2
368 $ echo b > sub2/b
379 $ echo b > sub2/b
369 $ hg -R sub2 ci -Am sub2
380 $ hg -R sub2 ci -Am sub2
370 adding b
381 adding b
371 $ echo 'sub2 = sub2' >> .hgsub
382 $ echo 'sub2 = sub2' >> .hgsub
372 $ fileset 'subrepo()'
383 $ fileset 'subrepo()'
373 $ hg add .hgsub
384 $ hg add .hgsub
374 $ fileset 'subrepo()'
385 $ fileset 'subrepo()'
375 sub
386 sub
376 sub2
387 sub2
377 $ fileset 'subrepo("sub")'
388 $ fileset 'subrepo("sub")'
378 sub
389 sub
379 $ fileset 'subrepo("glob:*")'
390 $ fileset 'subrepo("glob:*")'
380 sub
391 sub
381 sub2
392 sub2
382 $ hg ci -m subrepo
393 $ hg ci -m subrepo
383
394
384 Test that .hgsubstate is updated as appropriate during a conversion. The
395 Test that .hgsubstate is updated as appropriate during a conversion. The
385 saverev property is enough to alter the hashes of the subrepo.
396 saverev property is enough to alter the hashes of the subrepo.
386
397
387 $ hg init ../converted
398 $ hg init ../converted
388 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
399 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
389 > sub ../converted/sub
400 > sub ../converted/sub
390 initializing destination ../converted/sub repository
401 initializing destination ../converted/sub repository
391 scanning source...
402 scanning source...
392 sorting...
403 sorting...
393 converting...
404 converting...
394 0 sub
405 0 sub
395 $ hg clone -U sub2 ../converted/sub2
406 $ hg clone -U sub2 ../converted/sub2
396 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
407 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \
397 > . ../converted
408 > . ../converted
398 scanning source...
409 scanning source...
399 sorting...
410 sorting...
400 converting...
411 converting...
401 4 addfiles
412 4 addfiles
402 3 manychanges
413 3 manychanges
403 2 diverging
414 2 diverging
404 1 merge
415 1 merge
405 0 subrepo
416 0 subrepo
406 no ".hgsubstate" updates will be made for "sub2"
417 no ".hgsubstate" updates will be made for "sub2"
407 $ hg up -q -R ../converted -r tip
418 $ hg up -q -R ../converted -r tip
408 $ hg --cwd ../converted cat sub/suba sub2/b -r tip
419 $ hg --cwd ../converted cat sub/suba sub2/b -r tip
409 a
420 a
410 b
421 b
411 $ oldnode=`hg log -r tip -T "{node}\n"`
422 $ oldnode=`hg log -r tip -T "{node}\n"`
412 $ newnode=`hg log -R ../converted -r tip -T "{node}\n"`
423 $ newnode=`hg log -R ../converted -r tip -T "{node}\n"`
413 $ [ "$oldnode" != "$newnode" ] || echo "nothing changed"
424 $ [ "$oldnode" != "$newnode" ] || echo "nothing changed"
414
425
415 Test with a revision
426 Test with a revision
416
427
417 $ hg log -G --template '{rev} {desc}\n'
428 $ hg log -G --template '{rev} {desc}\n'
418 @ 4 subrepo
429 @ 4 subrepo
419 |
430 |
420 o 3 merge
431 o 3 merge
421 |\
432 |\
422 | o 2 diverging
433 | o 2 diverging
423 | |
434 | |
424 o | 1 manychanges
435 o | 1 manychanges
425 |/
436 |/
426 o 0 addfiles
437 o 0 addfiles
427
438
428 $ echo unknown > unknown
439 $ echo unknown > unknown
429 $ fileset -r1 'modified()'
440 $ fileset -r1 'modified()'
430 b2
441 b2
431 $ fileset -r1 'added() and c1'
442 $ fileset -r1 'added() and c1'
432 c1
443 c1
433 $ fileset -r1 'removed()'
444 $ fileset -r1 'removed()'
434 a2
445 a2
435 $ fileset -r1 'deleted()'
446 $ fileset -r1 'deleted()'
436 $ fileset -r1 'unknown()'
447 $ fileset -r1 'unknown()'
437 $ fileset -r1 'ignored()'
448 $ fileset -r1 'ignored()'
438 $ fileset -r1 'hgignore()'
449 $ fileset -r1 'hgignore()'
439 .hgignore
450 .hgignore
440 a2
451 a2
441 b2
452 b2
442 bin
453 bin
443 c2
454 c2
444 sub2
455 sub2
445 $ fileset -r1 'binary()'
456 $ fileset -r1 'binary()'
446 bin
457 bin
447 $ fileset -r1 'size(1k)'
458 $ fileset -r1 'size(1k)'
448 1k
459 1k
449 $ fileset -r3 'resolved()'
460 $ fileset -r3 'resolved()'
450 $ fileset -r3 'unresolved()'
461 $ fileset -r3 'unresolved()'
451
462
452 #if execbit
463 #if execbit
453 $ fileset -r1 'exec()'
464 $ fileset -r1 'exec()'
454 b2
465 b2
455 #endif
466 #endif
456
467
457 #if symlink
468 #if symlink
458 $ fileset -r1 'symlink()'
469 $ fileset -r1 'symlink()'
459 b2link
470 b2link
460 #endif
471 #endif
461
472
462 #if no-windows
473 #if no-windows
463 $ fileset -r1 'not portable()'
474 $ fileset -r1 'not portable()'
464 con.xml
475 con.xml
465 $ hg forget 'con.xml'
476 $ hg forget 'con.xml'
466 #endif
477 #endif
467
478
468 $ fileset -r4 'subrepo("re:su.*")'
479 $ fileset -r4 'subrepo("re:su.*")'
469 sub
480 sub
470 sub2
481 sub2
471 $ fileset -r4 'subrepo(re:su.*)'
482 $ fileset -r4 'subrepo(re:su.*)'
472 sub
483 sub
473 sub2
484 sub2
474 $ fileset -r4 'subrepo("sub")'
485 $ fileset -r4 'subrepo("sub")'
475 sub
486 sub
476 $ fileset -r4 'b2 or c1'
487 $ fileset -r4 'b2 or c1'
477 b2
488 b2
478 c1
489 c1
479
490
480 >>> open('dos', 'wb').write(b"dos\r\n") and None
491 >>> open('dos', 'wb').write(b"dos\r\n") and None
481 >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
492 >>> open('mixed', 'wb').write(b"dos\r\nunix\n") and None
482 >>> open('mac', 'wb').write(b"mac\r") and None
493 >>> open('mac', 'wb').write(b"mac\r") and None
483 $ hg add dos mixed mac
494 $ hg add dos mixed mac
484
495
485 (remove a1, to examine safety of 'eol' on removed files)
496 (remove a1, to examine safety of 'eol' on removed files)
486 $ rm a1
497 $ rm a1
487
498
488 $ fileset 'eol(dos)'
499 $ fileset 'eol(dos)'
489 dos
500 dos
490 mixed
501 mixed
491 $ fileset 'eol(unix)'
502 $ fileset 'eol(unix)'
492 .hgignore
503 .hgignore
493 .hgsub
504 .hgsub
494 .hgsubstate
505 .hgsubstate
495 b1
506 b1
496 b2
507 b2
497 b2.orig
508 b2.orig
498 c1
509 c1
499 c2
510 c2
500 c3
511 c3
501 con.xml (no-windows !)
512 con.xml (no-windows !)
502 mixed
513 mixed
503 unknown
514 unknown
504 $ fileset 'eol(mac)'
515 $ fileset 'eol(mac)'
505 mac
516 mac
506
517
507 Test safety of 'encoding' on removed files
518 Test safety of 'encoding' on removed files
508
519
509 $ fileset 'encoding("ascii")'
520 $ fileset 'encoding("ascii")'
510 .hgignore
521 .hgignore
511 .hgsub
522 .hgsub
512 .hgsubstate
523 .hgsubstate
513 1k
524 1k
514 2k
525 2k
515 b1
526 b1
516 b2
527 b2
517 b2.orig
528 b2.orig
518 b2link (symlink !)
529 b2link (symlink !)
519 bin
530 bin
520 c1
531 c1
521 c2
532 c2
522 c3
533 c3
523 con.xml (no-windows !)
534 con.xml (no-windows !)
524 dos
535 dos
525 mac
536 mac
526 mixed
537 mixed
527 unknown
538 unknown
528
539
529 Test 'revs(...)'
540 Test 'revs(...)'
530 ================
541 ================
531
542
532 small reminder of the repository state
543 small reminder of the repository state
533
544
534 $ hg log -G
545 $ hg log -G
535 @ changeset: 4:* (glob)
546 @ changeset: 4:* (glob)
536 | tag: tip
547 | tag: tip
537 | user: test
548 | user: test
538 | date: Thu Jan 01 00:00:00 1970 +0000
549 | date: Thu Jan 01 00:00:00 1970 +0000
539 | summary: subrepo
550 | summary: subrepo
540 |
551 |
541 o changeset: 3:* (glob)
552 o changeset: 3:* (glob)
542 |\ parent: 2:55b05bdebf36
553 |\ parent: 2:55b05bdebf36
543 | | parent: 1:* (glob)
554 | | parent: 1:* (glob)
544 | | user: test
555 | | user: test
545 | | date: Thu Jan 01 00:00:00 1970 +0000
556 | | date: Thu Jan 01 00:00:00 1970 +0000
546 | | summary: merge
557 | | summary: merge
547 | |
558 | |
548 | o changeset: 2:55b05bdebf36
559 | o changeset: 2:55b05bdebf36
549 | | parent: 0:8a9576c51c1f
560 | | parent: 0:8a9576c51c1f
550 | | user: test
561 | | user: test
551 | | date: Thu Jan 01 00:00:00 1970 +0000
562 | | date: Thu Jan 01 00:00:00 1970 +0000
552 | | summary: diverging
563 | | summary: diverging
553 | |
564 | |
554 o | changeset: 1:* (glob)
565 o | changeset: 1:* (glob)
555 |/ user: test
566 |/ user: test
556 | date: Thu Jan 01 00:00:00 1970 +0000
567 | date: Thu Jan 01 00:00:00 1970 +0000
557 | summary: manychanges
568 | summary: manychanges
558 |
569 |
559 o changeset: 0:8a9576c51c1f
570 o changeset: 0:8a9576c51c1f
560 user: test
571 user: test
561 date: Thu Jan 01 00:00:00 1970 +0000
572 date: Thu Jan 01 00:00:00 1970 +0000
562 summary: addfiles
573 summary: addfiles
563
574
564 $ hg status --change 0
575 $ hg status --change 0
565 A a1
576 A a1
566 A a2
577 A a2
567 A b1
578 A b1
568 A b2
579 A b2
569 $ hg status --change 1
580 $ hg status --change 1
570 M b2
581 M b2
571 A 1k
582 A 1k
572 A 2k
583 A 2k
573 A b2link (no-windows !)
584 A b2link (no-windows !)
574 A bin
585 A bin
575 A c1
586 A c1
576 A con.xml (no-windows !)
587 A con.xml (no-windows !)
577 R a2
588 R a2
578 $ hg status --change 2
589 $ hg status --change 2
579 M b2
590 M b2
580 $ hg status --change 3
591 $ hg status --change 3
581 M b2
592 M b2
582 A 1k
593 A 1k
583 A 2k
594 A 2k
584 A b2link (no-windows !)
595 A b2link (no-windows !)
585 A bin
596 A bin
586 A c1
597 A c1
587 A con.xml (no-windows !)
598 A con.xml (no-windows !)
588 R a2
599 R a2
589 $ hg status --change 4
600 $ hg status --change 4
590 A .hgsub
601 A .hgsub
591 A .hgsubstate
602 A .hgsubstate
592 $ hg status
603 $ hg status
593 A dos
604 A dos
594 A mac
605 A mac
595 A mixed
606 A mixed
596 R con.xml (no-windows !)
607 R con.xml (no-windows !)
597 ! a1
608 ! a1
598 ? b2.orig
609 ? b2.orig
599 ? c3
610 ? c3
600 ? unknown
611 ? unknown
601
612
602 Test files at -r0 should be filtered by files at wdir
613 Test files at -r0 should be filtered by files at wdir
603 -----------------------------------------------------
614 -----------------------------------------------------
604
615
605 $ fileset -r0 'tracked() and revs("wdir()", tracked())'
616 $ fileset -r0 'tracked() and revs("wdir()", tracked())'
606 a1
617 a1
607 b1
618 b1
608 b2
619 b2
609
620
610 Test that "revs()" work at all
621 Test that "revs()" work at all
611 ------------------------------
622 ------------------------------
612
623
613 $ fileset "revs('2', modified())"
624 $ fileset "revs('2', modified())"
614 b2
625 b2
615
626
616 Test that "revs()" work for file missing in the working copy/current context
627 Test that "revs()" work for file missing in the working copy/current context
617 ----------------------------------------------------------------------------
628 ----------------------------------------------------------------------------
618
629
619 (a2 not in working copy)
630 (a2 not in working copy)
620
631
621 $ fileset "revs('0', added())"
632 $ fileset "revs('0', added())"
622 a1
633 a1
623 a2
634 a2
624 b1
635 b1
625 b2
636 b2
626
637
627 (none of the file exist in "0")
638 (none of the file exist in "0")
628
639
629 $ fileset -r 0 "revs('4', added())"
640 $ fileset -r 0 "revs('4', added())"
630 .hgsub
641 .hgsub
631 .hgsubstate
642 .hgsubstate
632
643
633 Call with empty revset
644 Call with empty revset
634 --------------------------
645 --------------------------
635
646
636 $ fileset "revs('2-2', modified())"
647 $ fileset "revs('2-2', modified())"
637
648
638 Call with revset matching multiple revs
649 Call with revset matching multiple revs
639 ---------------------------------------
650 ---------------------------------------
640
651
641 $ fileset "revs('0+4', added())"
652 $ fileset "revs('0+4', added())"
642 .hgsub
653 .hgsub
643 .hgsubstate
654 .hgsubstate
644 a1
655 a1
645 a2
656 a2
646 b1
657 b1
647 b2
658 b2
648
659
649 overlapping set
660 overlapping set
650
661
651 $ fileset "revs('1+2', modified())"
662 $ fileset "revs('1+2', modified())"
652 b2
663 b2
653
664
654 test 'status(...)'
665 test 'status(...)'
655 =================
666 =================
656
667
657 Simple case
668 Simple case
658 -----------
669 -----------
659
670
660 $ fileset "status(3, 4, added())"
671 $ fileset "status(3, 4, added())"
661 .hgsub
672 .hgsub
662 .hgsubstate
673 .hgsubstate
663
674
664 use rev to restrict matched file
675 use rev to restrict matched file
665 -----------------------------------------
676 -----------------------------------------
666
677
667 $ hg status --removed --rev 0 --rev 1
678 $ hg status --removed --rev 0 --rev 1
668 R a2
679 R a2
669 $ fileset "status(0, 1, removed())"
680 $ fileset "status(0, 1, removed())"
670 a2
681 a2
671 $ fileset "tracked() and status(0, 1, removed())"
682 $ fileset "tracked() and status(0, 1, removed())"
672 $ fileset -r 4 "status(0, 1, removed())"
683 $ fileset -r 4 "status(0, 1, removed())"
673 a2
684 a2
674 $ fileset -r 4 "tracked() and status(0, 1, removed())"
685 $ fileset -r 4 "tracked() and status(0, 1, removed())"
675 $ fileset "revs('4', tracked() and status(0, 1, removed()))"
686 $ fileset "revs('4', tracked() and status(0, 1, removed()))"
676 $ fileset "revs('0', tracked() and status(0, 1, removed()))"
687 $ fileset "revs('0', tracked() and status(0, 1, removed()))"
677 a2
688 a2
678
689
679 check wdir()
690 check wdir()
680 ------------
691 ------------
681
692
682 $ hg status --removed --rev 4
693 $ hg status --removed --rev 4
683 R con.xml (no-windows !)
694 R con.xml (no-windows !)
684 $ fileset "status(4, 'wdir()', removed())"
695 $ fileset "status(4, 'wdir()', removed())"
685 con.xml (no-windows !)
696 con.xml (no-windows !)
686
697
687 $ hg status --removed --rev 2
698 $ hg status --removed --rev 2
688 R a2
699 R a2
689 $ fileset "status('2', 'wdir()', removed())"
700 $ fileset "status('2', 'wdir()', removed())"
690 a2
701 a2
691
702
692 test backward status
703 test backward status
693 --------------------
704 --------------------
694
705
695 $ hg status --removed --rev 0 --rev 4
706 $ hg status --removed --rev 0 --rev 4
696 R a2
707 R a2
697 $ hg status --added --rev 4 --rev 0
708 $ hg status --added --rev 4 --rev 0
698 A a2
709 A a2
699 $ fileset "status(4, 0, added())"
710 $ fileset "status(4, 0, added())"
700 a2
711 a2
701
712
702 test cross branch status
713 test cross branch status
703 ------------------------
714 ------------------------
704
715
705 $ hg status --added --rev 1 --rev 2
716 $ hg status --added --rev 1 --rev 2
706 A a2
717 A a2
707 $ fileset "status(1, 2, added())"
718 $ fileset "status(1, 2, added())"
708 a2
719 a2
709
720
710 test with multi revs revset
721 test with multi revs revset
711 ---------------------------
722 ---------------------------
712 $ hg status --added --rev 0:1 --rev 3:4
723 $ hg status --added --rev 0:1 --rev 3:4
713 A .hgsub
724 A .hgsub
714 A .hgsubstate
725 A .hgsubstate
715 A 1k
726 A 1k
716 A 2k
727 A 2k
717 A b2link (no-windows !)
728 A b2link (no-windows !)
718 A bin
729 A bin
719 A c1
730 A c1
720 A con.xml (no-windows !)
731 A con.xml (no-windows !)
721 $ fileset "status('0:1', '3:4', added())"
732 $ fileset "status('0:1', '3:4', added())"
722 .hgsub
733 .hgsub
723 .hgsubstate
734 .hgsubstate
724 1k
735 1k
725 2k
736 2k
726 b2link (no-windows !)
737 b2link (no-windows !)
727 bin
738 bin
728 c1
739 c1
729 con.xml (no-windows !)
740 con.xml (no-windows !)
730
741
731 tests with empty value
742 tests with empty value
732 ----------------------
743 ----------------------
733
744
734 Fully empty revset
745 Fully empty revset
735
746
736 $ fileset "status('', '4', added())"
747 $ fileset "status('', '4', added())"
737 hg: parse error: first argument to status must be a revision
748 hg: parse error: first argument to status must be a revision
738 [255]
749 [255]
739 $ fileset "status('2', '', added())"
750 $ fileset "status('2', '', added())"
740 hg: parse error: second argument to status must be a revision
751 hg: parse error: second argument to status must be a revision
741 [255]
752 [255]
742
753
743 Empty revset will error at the revset layer
754 Empty revset will error at the revset layer
744
755
745 $ fileset "status(' ', '4', added())"
756 $ fileset "status(' ', '4', added())"
746 hg: parse error at 1: not a prefix: end
757 hg: parse error at 1: not a prefix: end
747 (
758 (
748 ^ here)
759 ^ here)
749 [255]
760 [255]
750 $ fileset "status('2', ' ', added())"
761 $ fileset "status('2', ' ', added())"
751 hg: parse error at 1: not a prefix: end
762 hg: parse error at 1: not a prefix: end
752 (
763 (
753 ^ here)
764 ^ here)
754 [255]
765 [255]
General Comments 0
You need to be logged in to leave comments. Login now