##// END OF EJS Templates
py3: use '%d' for integers instead of '%s'...
Pulkit Goyal -
r36417:a24c57f1 default
parent child Browse files
Show More
@@ -1,2500 +1,2500
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import socket
17 import socket
18 import ssl
18 import ssl
19 import string
19 import string
20 import sys
20 import sys
21 import tempfile
21 import tempfile
22 import time
22 import time
23
23
24 from .i18n import _
24 from .i18n import _
25 from .node import (
25 from .node import (
26 bin,
26 bin,
27 hex,
27 hex,
28 nullhex,
28 nullhex,
29 nullid,
29 nullid,
30 nullrev,
30 nullrev,
31 short,
31 short,
32 )
32 )
33 from . import (
33 from . import (
34 bundle2,
34 bundle2,
35 changegroup,
35 changegroup,
36 cmdutil,
36 cmdutil,
37 color,
37 color,
38 context,
38 context,
39 dagparser,
39 dagparser,
40 dagutil,
40 dagutil,
41 encoding,
41 encoding,
42 error,
42 error,
43 exchange,
43 exchange,
44 extensions,
44 extensions,
45 filemerge,
45 filemerge,
46 fileset,
46 fileset,
47 formatter,
47 formatter,
48 hg,
48 hg,
49 localrepo,
49 localrepo,
50 lock as lockmod,
50 lock as lockmod,
51 logcmdutil,
51 logcmdutil,
52 merge as mergemod,
52 merge as mergemod,
53 obsolete,
53 obsolete,
54 obsutil,
54 obsutil,
55 phases,
55 phases,
56 policy,
56 policy,
57 pvec,
57 pvec,
58 pycompat,
58 pycompat,
59 registrar,
59 registrar,
60 repair,
60 repair,
61 revlog,
61 revlog,
62 revset,
62 revset,
63 revsetlang,
63 revsetlang,
64 scmutil,
64 scmutil,
65 setdiscovery,
65 setdiscovery,
66 simplemerge,
66 simplemerge,
67 smartset,
67 smartset,
68 sslutil,
68 sslutil,
69 streamclone,
69 streamclone,
70 templater,
70 templater,
71 treediscovery,
71 treediscovery,
72 upgrade,
72 upgrade,
73 url as urlmod,
73 url as urlmod,
74 util,
74 util,
75 vfs as vfsmod,
75 vfs as vfsmod,
76 )
76 )
77
77
78 release = lockmod.release
78 release = lockmod.release
79
79
80 command = registrar.command()
80 command = registrar.command()
81
81
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    # Three arguments name an explicit revlog index file; two arguments
    # fall back to the changelog of the current repository.
    if len(args) == 3:
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
100
100
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Open the given path (plain file or URL), parse it as a bundle, and
    # let the bundle object apply itself to the repository.
    fh = hg.openpath(ui, fname)
    bundle = exchange.readbundle(ui, fh, fname)
    bundle.apply(repo)
107
107
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a pristine repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # First pass over the parsed DAG: count the node elements so the
    # progress bar below can report a meaningful total.
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    # All commits happen inside a single transaction so a parse error or
    # commit failure leaves the repository empty.
    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                  # rev number of the most recent commit
        atbranch = 'default'     # named branch for subsequent commits
        nodeids = []             # node id per rev, indexed by backrefs
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # Node element: synthesize file contents per the flags and
                # commit them via an in-memory context.
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    # Single file "mf" whose per-rev line edits merge
                    # cleanly; merges are resolved with simplemerge.
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # Tag this rev's dedicated line so every rev changes it.
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file "of" fully rewritten by every rev.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # A fresh "nf<rev>" file per rev; on merges, carry over
                    # the other parent's nf* files so they are not lost.
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve the synthesized contents.
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Resolve parent backrefs to node ids committed earlier.
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # Local tag for the preceding node; collected here and
                # written to .hg/localtags after the transaction closes.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # Switch the named branch used for subsequent commits.
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        # Clear the progress bar and release transaction/locks in order.
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
262
262
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """display the contents of a changegroup 'gen'

    With 'all', every delta chunk of the changelog, manifest and each
    filelog is printed with its full metadata; otherwise only the
    changelog node hashes are listed. 'indent' prefixes every output line
    (used when nested inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Print one section header plus one line per delta chunk of
            # the current sub-stream of 'gen'.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %s\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # The unbundler is consumed sequentially: changelog, then
        # manifest, then one section per filelog until the empty header.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        # Advance past the changelog header (its contents are unused
        # here), then list just the changelog node hashes.
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
291
291
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    # Normalize **opts for use as a bytes-keyed options dict.
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown encoding version: report it instead of aborting so the
        # rest of the bundle can still be inspected.
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # Reuse the 'debugobsolete' formatter so markers render the same
        # way as in that command's output.
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
314
314
def _debugphaseheads(ui, data, indent=0):
    """display phase heads contained in 'data'"""
    # Decode the binary phase-heads encoding into one head list per phase
    # and print each head hash with its phase name, indented as requested.
    prefix = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phasename))
323
323
def _quasirepr(thing):
    """return a repr-like bytes rendering of 'thing'

    Mappings are rendered with deterministically sorted keys so output is
    stable across dict iteration orders; everything else falls back to
    repr().
    """
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
        return '{%s}' % b', '.join(pairs)
    return pycompat.bytestr(repr(thing))
329
329
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # Optional --part-type filter; empty list means "show every part".
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        # Known part payloads get a detailed, indented dump via the
        # matching helper; unknown part types show only the header above.
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
348
348
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        # --spec short-circuits: print the bundlespec and nothing else.
        if spec:
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        # Dispatch on the detected bundle format.
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
367
367
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Plain wire-protocol capabilities first, sorted for stable output.
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write(('  %s\n') % cap)
    # Then the decoded bundle2 capability map, when the peer has one.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capname, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % capname)
            for value in values:
                ui.write(('    %s\n') % value)
386
386
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Cross-check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # Reverse direction: every manifest1 entry must be tracked ('nrm').
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Use a name that does not shadow the imported 'error' module;
        # binding the message to a local called 'error' made the
        # following 'error.Abort' an AttributeError on a bytes object
        # instead of the intended abort.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
414
414
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured styles; the default lists raw color names.
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
425
425
def _debugdisplaycolor(ui):
    """print every color name known to the active color mode"""
    # Work on a copy so the caller's ui keeps its configured styles.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # In terminfo mode, user-defined color./terminfo. config entries
        # contribute additional names (with their prefix stripped).
        for key, dummy in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[len('color.'):]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[len('terminfo.'):]
    ui.write(_('available colors:\n'))
    # Names containing '_' (e.g. '_background' variants) sort after the
    # plain names so related entries stay grouped together.
    def sortkey(item):
        return ('_' in item[0], item[0], item[1])
    for colorname, label in sorted(ui._styles.items(), key=sortkey):
        ui.write(('%s\n') % colorname, label=label)
443
443
def _debugdisplaystyle(ui):
    """print each configured style label with its rendered effect list"""
    ui.write(_('available style:\n'))
    # Widest label determines the padding so effect lists line up.
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            padding = max(0, width - len(label))
            ui.write(' ' * padding)
            # Render each effect name in its own effect for a live preview.
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
455
455
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    # Generate the v1 stream bundle and spool its chunks to 'fname'.
    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    reqstr = ', '.join(sorted(requirements))
    ui.write(_('bundle requirements: %s\n') % reqstr)
473
473
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit revlog index file: emit its DAG, labeling any revs the
        # user listed on the command line as "rN".
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, [parents])) per revision; nullrev parents
            # (-1) are dropped. Labels follow their node.
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No index file: walk the repository changelog instead.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Map rev -> [tag names] so tags can be emitted as labels.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            # 'a' events announce branch changes, 'n' events carry
            # (rev, parents), 'l' events attach tag labels.
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    # Serialize the event stream into dagparser's concise text notation.
    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
536
536
537 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
537 @command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
538 def debugdata(ui, repo, file_, rev=None, **opts):
538 def debugdata(ui, repo, file_, rev=None, **opts):
539 """dump the contents of a data file revision"""
539 """dump the contents of a data file revision"""
540 opts = pycompat.byteskwargs(opts)
540 opts = pycompat.byteskwargs(opts)
541 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
541 if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
542 if rev is not None:
542 if rev is not None:
543 raise error.CommandError('debugdata', _('invalid arguments'))
543 raise error.CommandError('debugdata', _('invalid arguments'))
544 file_, rev = None, file_
544 file_, rev = None, file_
545 elif rev is None:
545 elif rev is None:
546 raise error.CommandError('debugdata', _('invalid arguments'))
546 raise error.CommandError('debugdata', _('invalid arguments'))
547 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
547 r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
548 try:
548 try:
549 ui.write(r.revision(r.lookup(rev), raw=True))
549 ui.write(r.revision(r.lookup(rev), raw=True))
550 except KeyError:
550 except KeyError:
551 raise error.Abort(_('invalid revision identifier %s') % rev)
551 raise error.Abort(_('invalid revision identifier %s') % rev)
552
552
553 @command('debugdate',
553 @command('debugdate',
554 [('e', 'extended', None, _('try extended date formats'))],
554 [('e', 'extended', None, _('try extended date formats'))],
555 _('[-e] DATE [RANGE]'),
555 _('[-e] DATE [RANGE]'),
556 norepo=True, optionalrepo=True)
556 norepo=True, optionalrepo=True)
557 def debugdate(ui, date, range=None, **opts):
557 def debugdate(ui, date, range=None, **opts):
558 """parse and display a date"""
558 """parse and display a date"""
559 if opts[r"extended"]:
559 if opts[r"extended"]:
560 d = util.parsedate(date, util.extendeddateformats)
560 d = util.parsedate(date, util.extendeddateformats)
561 else:
561 else:
562 d = util.parsedate(date)
562 d = util.parsedate(date)
563 ui.write(("internal: %s %s\n") % d)
563 ui.write(("internal: %d %d\n") % d)
564 ui.write(("standard: %s\n") % util.datestr(d))
564 ui.write(("standard: %s\n") % util.datestr(d))
565 if range:
565 if range:
566 m = util.matchdate(range)
566 m = util.matchdate(range)
567 ui.write(("match: %s\n") % m(d[0]))
567 ui.write(("match: %s\n") % m(d[0]))
568
568
569 @command('debugdeltachain',
569 @command('debugdeltachain',
570 cmdutil.debugrevlogopts + cmdutil.formatteropts,
570 cmdutil.debugrevlogopts + cmdutil.formatteropts,
571 _('-c|-m|FILE'),
571 _('-c|-m|FILE'),
572 optionalrepo=True)
572 optionalrepo=True)
573 def debugdeltachain(ui, repo, file_=None, **opts):
573 def debugdeltachain(ui, repo, file_=None, **opts):
574 """dump information about delta chains in a revlog
574 """dump information about delta chains in a revlog
575
575
576 Output can be templatized. Available template keywords are:
576 Output can be templatized. Available template keywords are:
577
577
578 :``rev``: revision number
578 :``rev``: revision number
579 :``chainid``: delta chain identifier (numbered by unique base)
579 :``chainid``: delta chain identifier (numbered by unique base)
580 :``chainlen``: delta chain length to this revision
580 :``chainlen``: delta chain length to this revision
581 :``prevrev``: previous revision in delta chain
581 :``prevrev``: previous revision in delta chain
582 :``deltatype``: role of delta / how it was computed
582 :``deltatype``: role of delta / how it was computed
583 :``compsize``: compressed size of revision
583 :``compsize``: compressed size of revision
584 :``uncompsize``: uncompressed size of revision
584 :``uncompsize``: uncompressed size of revision
585 :``chainsize``: total size of compressed revisions in chain
585 :``chainsize``: total size of compressed revisions in chain
586 :``chainratio``: total chain size divided by uncompressed revision size
586 :``chainratio``: total chain size divided by uncompressed revision size
587 (new delta chains typically start at ratio 2.00)
587 (new delta chains typically start at ratio 2.00)
588 :``lindist``: linear distance from base revision in delta chain to end
588 :``lindist``: linear distance from base revision in delta chain to end
589 of this revision
589 of this revision
590 :``extradist``: total size of revisions not part of this delta chain from
590 :``extradist``: total size of revisions not part of this delta chain from
591 base of delta chain to end of this revision; a measurement
591 base of delta chain to end of this revision; a measurement
592 of how much extra data we need to read/seek across to read
592 of how much extra data we need to read/seek across to read
593 the delta chain for this revision
593 the delta chain for this revision
594 :``extraratio``: extradist divided by chainsize; another representation of
594 :``extraratio``: extradist divided by chainsize; another representation of
595 how much unrelated data is needed to load this delta chain
595 how much unrelated data is needed to load this delta chain
596
596
597 If the repository is configured to use the sparse read, additional keywords
597 If the repository is configured to use the sparse read, additional keywords
598 are available:
598 are available:
599
599
600 :``readsize``: total size of data read from the disk for a revision
600 :``readsize``: total size of data read from the disk for a revision
601 (sum of the sizes of all the blocks)
601 (sum of the sizes of all the blocks)
602 :``largestblock``: size of the largest block of data read from the disk
602 :``largestblock``: size of the largest block of data read from the disk
603 :``readdensity``: density of useful bytes in the data read from the disk
603 :``readdensity``: density of useful bytes in the data read from the disk
604 :``srchunks``: in how many data hunks the whole revision would be read
604 :``srchunks``: in how many data hunks the whole revision would be read
605
605
606 The sparse read can be enabled with experimental.sparse-read = True
606 The sparse read can be enabled with experimental.sparse-read = True
607 """
607 """
608 opts = pycompat.byteskwargs(opts)
608 opts = pycompat.byteskwargs(opts)
609 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
609 r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
610 index = r.index
610 index = r.index
611 generaldelta = r.version & revlog.FLAG_GENERALDELTA
611 generaldelta = r.version & revlog.FLAG_GENERALDELTA
612 withsparseread = getattr(r, '_withsparseread', False)
612 withsparseread = getattr(r, '_withsparseread', False)
613
613
614 def revinfo(rev):
614 def revinfo(rev):
615 e = index[rev]
615 e = index[rev]
616 compsize = e[1]
616 compsize = e[1]
617 uncompsize = e[2]
617 uncompsize = e[2]
618 chainsize = 0
618 chainsize = 0
619
619
620 if generaldelta:
620 if generaldelta:
621 if e[3] == e[5]:
621 if e[3] == e[5]:
622 deltatype = 'p1'
622 deltatype = 'p1'
623 elif e[3] == e[6]:
623 elif e[3] == e[6]:
624 deltatype = 'p2'
624 deltatype = 'p2'
625 elif e[3] == rev - 1:
625 elif e[3] == rev - 1:
626 deltatype = 'prev'
626 deltatype = 'prev'
627 elif e[3] == rev:
627 elif e[3] == rev:
628 deltatype = 'base'
628 deltatype = 'base'
629 else:
629 else:
630 deltatype = 'other'
630 deltatype = 'other'
631 else:
631 else:
632 if e[3] == rev:
632 if e[3] == rev:
633 deltatype = 'base'
633 deltatype = 'base'
634 else:
634 else:
635 deltatype = 'prev'
635 deltatype = 'prev'
636
636
637 chain = r._deltachain(rev)[0]
637 chain = r._deltachain(rev)[0]
638 for iterrev in chain:
638 for iterrev in chain:
639 e = index[iterrev]
639 e = index[iterrev]
640 chainsize += e[1]
640 chainsize += e[1]
641
641
642 return compsize, uncompsize, deltatype, chain, chainsize
642 return compsize, uncompsize, deltatype, chain, chainsize
643
643
644 fm = ui.formatter('debugdeltachain', opts)
644 fm = ui.formatter('debugdeltachain', opts)
645
645
646 fm.plain(' rev chain# chainlen prev delta '
646 fm.plain(' rev chain# chainlen prev delta '
647 'size rawsize chainsize ratio lindist extradist '
647 'size rawsize chainsize ratio lindist extradist '
648 'extraratio')
648 'extraratio')
649 if withsparseread:
649 if withsparseread:
650 fm.plain(' readsize largestblk rddensity srchunks')
650 fm.plain(' readsize largestblk rddensity srchunks')
651 fm.plain('\n')
651 fm.plain('\n')
652
652
653 chainbases = {}
653 chainbases = {}
654 for rev in r:
654 for rev in r:
655 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
655 comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
656 chainbase = chain[0]
656 chainbase = chain[0]
657 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
657 chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
658 start = r.start
658 start = r.start
659 length = r.length
659 length = r.length
660 basestart = start(chainbase)
660 basestart = start(chainbase)
661 revstart = start(rev)
661 revstart = start(rev)
662 lineardist = revstart + comp - basestart
662 lineardist = revstart + comp - basestart
663 extradist = lineardist - chainsize
663 extradist = lineardist - chainsize
664 try:
664 try:
665 prevrev = chain[-2]
665 prevrev = chain[-2]
666 except IndexError:
666 except IndexError:
667 prevrev = -1
667 prevrev = -1
668
668
669 chainratio = float(chainsize) / float(uncomp)
669 chainratio = float(chainsize) / float(uncomp)
670 extraratio = float(extradist) / float(chainsize)
670 extraratio = float(extradist) / float(chainsize)
671
671
672 fm.startitem()
672 fm.startitem()
673 fm.write('rev chainid chainlen prevrev deltatype compsize '
673 fm.write('rev chainid chainlen prevrev deltatype compsize '
674 'uncompsize chainsize chainratio lindist extradist '
674 'uncompsize chainsize chainratio lindist extradist '
675 'extraratio',
675 'extraratio',
676 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
676 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
677 rev, chainid, len(chain), prevrev, deltatype, comp,
677 rev, chainid, len(chain), prevrev, deltatype, comp,
678 uncomp, chainsize, chainratio, lineardist, extradist,
678 uncomp, chainsize, chainratio, lineardist, extradist,
679 extraratio,
679 extraratio,
680 rev=rev, chainid=chainid, chainlen=len(chain),
680 rev=rev, chainid=chainid, chainlen=len(chain),
681 prevrev=prevrev, deltatype=deltatype, compsize=comp,
681 prevrev=prevrev, deltatype=deltatype, compsize=comp,
682 uncompsize=uncomp, chainsize=chainsize,
682 uncompsize=uncomp, chainsize=chainsize,
683 chainratio=chainratio, lindist=lineardist,
683 chainratio=chainratio, lindist=lineardist,
684 extradist=extradist, extraratio=extraratio)
684 extradist=extradist, extraratio=extraratio)
685 if withsparseread:
685 if withsparseread:
686 readsize = 0
686 readsize = 0
687 largestblock = 0
687 largestblock = 0
688 srchunks = 0
688 srchunks = 0
689
689
690 for revschunk in revlog._slicechunk(r, chain):
690 for revschunk in revlog._slicechunk(r, chain):
691 srchunks += 1
691 srchunks += 1
692 blkend = start(revschunk[-1]) + length(revschunk[-1])
692 blkend = start(revschunk[-1]) + length(revschunk[-1])
693 blksize = blkend - start(revschunk[0])
693 blksize = blkend - start(revschunk[0])
694
694
695 readsize += blksize
695 readsize += blksize
696 if largestblock < blksize:
696 if largestblock < blksize:
697 largestblock = blksize
697 largestblock = blksize
698
698
699 readdensity = float(chainsize) / float(readsize)
699 readdensity = float(chainsize) / float(readsize)
700
700
701 fm.write('readsize largestblock readdensity srchunks',
701 fm.write('readsize largestblock readdensity srchunks',
702 ' %10d %10d %9.5f %8d',
702 ' %10d %10d %9.5f %8d',
703 readsize, largestblock, readdensity, srchunks,
703 readsize, largestblock, readdensity, srchunks,
704 readsize=readsize, largestblock=largestblock,
704 readsize=readsize, largestblock=largestblock,
705 readdensity=readdensity, srchunks=srchunks)
705 readdensity=readdensity, srchunks=srchunks)
706
706
707 fm.plain('\n')
707 fm.plain('\n')
708
708
709 fm.end()
709 fm.end()
710
710
711 @command('debugdirstate|debugstate',
711 @command('debugdirstate|debugstate',
712 [('', 'nodates', None, _('do not display the saved mtime')),
712 [('', 'nodates', None, _('do not display the saved mtime')),
713 ('', 'datesort', None, _('sort by saved mtime'))],
713 ('', 'datesort', None, _('sort by saved mtime'))],
714 _('[OPTION]...'))
714 _('[OPTION]...'))
715 def debugstate(ui, repo, **opts):
715 def debugstate(ui, repo, **opts):
716 """show the contents of the current dirstate"""
716 """show the contents of the current dirstate"""
717
717
718 nodates = opts.get(r'nodates')
718 nodates = opts.get(r'nodates')
719 datesort = opts.get(r'datesort')
719 datesort = opts.get(r'datesort')
720
720
721 timestr = ""
721 timestr = ""
722 if datesort:
722 if datesort:
723 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
723 keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
724 else:
724 else:
725 keyfunc = None # sort by filename
725 keyfunc = None # sort by filename
726 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
726 for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
727 if ent[3] == -1:
727 if ent[3] == -1:
728 timestr = 'unset '
728 timestr = 'unset '
729 elif nodates:
729 elif nodates:
730 timestr = 'set '
730 timestr = 'set '
731 else:
731 else:
732 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
732 timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
733 time.localtime(ent[3]))
733 time.localtime(ent[3]))
734 timestr = encoding.strtolocal(timestr)
734 timestr = encoding.strtolocal(timestr)
735 if ent[1] & 0o20000:
735 if ent[1] & 0o20000:
736 mode = 'lnk'
736 mode = 'lnk'
737 else:
737 else:
738 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
738 mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
739 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
739 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
740 for f in repo.dirstate.copies():
740 for f in repo.dirstate.copies():
741 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
741 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
742
742
743 @command('debugdiscovery',
743 @command('debugdiscovery',
744 [('', 'old', None, _('use old-style discovery')),
744 [('', 'old', None, _('use old-style discovery')),
745 ('', 'nonheads', None,
745 ('', 'nonheads', None,
746 _('use old-style discovery with non-heads included')),
746 _('use old-style discovery with non-heads included')),
747 ('', 'rev', [], 'restrict discovery to this set of revs'),
747 ('', 'rev', [], 'restrict discovery to this set of revs'),
748 ] + cmdutil.remoteopts,
748 ] + cmdutil.remoteopts,
749 _('[--rev REV] [OTHER]'))
749 _('[--rev REV] [OTHER]'))
750 def debugdiscovery(ui, repo, remoteurl="default", **opts):
750 def debugdiscovery(ui, repo, remoteurl="default", **opts):
751 """runs the changeset discovery protocol in isolation"""
751 """runs the changeset discovery protocol in isolation"""
752 opts = pycompat.byteskwargs(opts)
752 opts = pycompat.byteskwargs(opts)
753 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
753 remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
754 remote = hg.peer(repo, opts, remoteurl)
754 remote = hg.peer(repo, opts, remoteurl)
755 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
755 ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
756
756
757 # make sure tests are repeatable
757 # make sure tests are repeatable
758 random.seed(12323)
758 random.seed(12323)
759
759
760 def doit(pushedrevs, remoteheads, remote=remote):
760 def doit(pushedrevs, remoteheads, remote=remote):
761 if opts.get('old'):
761 if opts.get('old'):
762 if not util.safehasattr(remote, 'branches'):
762 if not util.safehasattr(remote, 'branches'):
763 # enable in-client legacy support
763 # enable in-client legacy support
764 remote = localrepo.locallegacypeer(remote.local())
764 remote = localrepo.locallegacypeer(remote.local())
765 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
765 common, _in, hds = treediscovery.findcommonincoming(repo, remote,
766 force=True)
766 force=True)
767 common = set(common)
767 common = set(common)
768 if not opts.get('nonheads'):
768 if not opts.get('nonheads'):
769 ui.write(("unpruned common: %s\n") %
769 ui.write(("unpruned common: %s\n") %
770 " ".join(sorted(short(n) for n in common)))
770 " ".join(sorted(short(n) for n in common)))
771 dag = dagutil.revlogdag(repo.changelog)
771 dag = dagutil.revlogdag(repo.changelog)
772 all = dag.ancestorset(dag.internalizeall(common))
772 all = dag.ancestorset(dag.internalizeall(common))
773 common = dag.externalizeall(dag.headsetofconnecteds(all))
773 common = dag.externalizeall(dag.headsetofconnecteds(all))
774 else:
774 else:
775 nodes = None
775 nodes = None
776 if pushedrevs:
776 if pushedrevs:
777 revs = scmutil.revrange(repo, pushedrevs)
777 revs = scmutil.revrange(repo, pushedrevs)
778 nodes = [repo[r].node() for r in revs]
778 nodes = [repo[r].node() for r in revs]
779 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
779 common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
780 ancestorsof=nodes)
780 ancestorsof=nodes)
781 common = set(common)
781 common = set(common)
782 rheads = set(hds)
782 rheads = set(hds)
783 lheads = set(repo.heads())
783 lheads = set(repo.heads())
784 ui.write(("common heads: %s\n") %
784 ui.write(("common heads: %s\n") %
785 " ".join(sorted(short(n) for n in common)))
785 " ".join(sorted(short(n) for n in common)))
786 if lheads <= common:
786 if lheads <= common:
787 ui.write(("local is subset\n"))
787 ui.write(("local is subset\n"))
788 elif rheads <= common:
788 elif rheads <= common:
789 ui.write(("remote is subset\n"))
789 ui.write(("remote is subset\n"))
790
790
791 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
791 remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
792 localrevs = opts['rev']
792 localrevs = opts['rev']
793 doit(localrevs, remoterevs)
793 doit(localrevs, remoterevs)
794
794
795 _chunksize = 4 << 10
795 _chunksize = 4 << 10
796
796
797 @command('debugdownload',
797 @command('debugdownload',
798 [
798 [
799 ('o', 'output', '', _('path')),
799 ('o', 'output', '', _('path')),
800 ],
800 ],
801 optionalrepo=True)
801 optionalrepo=True)
802 def debugdownload(ui, repo, url, output=None, **opts):
802 def debugdownload(ui, repo, url, output=None, **opts):
803 """download a resource using Mercurial logic and config
803 """download a resource using Mercurial logic and config
804 """
804 """
805 fh = urlmod.open(ui, url, output)
805 fh = urlmod.open(ui, url, output)
806
806
807 dest = ui
807 dest = ui
808 if output:
808 if output:
809 dest = open(output, "wb", _chunksize)
809 dest = open(output, "wb", _chunksize)
810 try:
810 try:
811 data = fh.read(_chunksize)
811 data = fh.read(_chunksize)
812 while data:
812 while data:
813 dest.write(data)
813 dest.write(data)
814 data = fh.read(_chunksize)
814 data = fh.read(_chunksize)
815 finally:
815 finally:
816 if output:
816 if output:
817 dest.close()
817 dest.close()
818
818
819 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
819 @command('debugextensions', cmdutil.formatteropts, [], norepo=True)
820 def debugextensions(ui, **opts):
820 def debugextensions(ui, **opts):
821 '''show information about active extensions'''
821 '''show information about active extensions'''
822 opts = pycompat.byteskwargs(opts)
822 opts = pycompat.byteskwargs(opts)
823 exts = extensions.extensions(ui)
823 exts = extensions.extensions(ui)
824 hgver = util.version()
824 hgver = util.version()
825 fm = ui.formatter('debugextensions', opts)
825 fm = ui.formatter('debugextensions', opts)
826 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
826 for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
827 isinternal = extensions.ismoduleinternal(extmod)
827 isinternal = extensions.ismoduleinternal(extmod)
828 extsource = pycompat.fsencode(extmod.__file__)
828 extsource = pycompat.fsencode(extmod.__file__)
829 if isinternal:
829 if isinternal:
830 exttestedwith = [] # never expose magic string to users
830 exttestedwith = [] # never expose magic string to users
831 else:
831 else:
832 exttestedwith = getattr(extmod, 'testedwith', '').split()
832 exttestedwith = getattr(extmod, 'testedwith', '').split()
833 extbuglink = getattr(extmod, 'buglink', None)
833 extbuglink = getattr(extmod, 'buglink', None)
834
834
835 fm.startitem()
835 fm.startitem()
836
836
837 if ui.quiet or ui.verbose:
837 if ui.quiet or ui.verbose:
838 fm.write('name', '%s\n', extname)
838 fm.write('name', '%s\n', extname)
839 else:
839 else:
840 fm.write('name', '%s', extname)
840 fm.write('name', '%s', extname)
841 if isinternal or hgver in exttestedwith:
841 if isinternal or hgver in exttestedwith:
842 fm.plain('\n')
842 fm.plain('\n')
843 elif not exttestedwith:
843 elif not exttestedwith:
844 fm.plain(_(' (untested!)\n'))
844 fm.plain(_(' (untested!)\n'))
845 else:
845 else:
846 lasttestedversion = exttestedwith[-1]
846 lasttestedversion = exttestedwith[-1]
847 fm.plain(' (%s!)\n' % lasttestedversion)
847 fm.plain(' (%s!)\n' % lasttestedversion)
848
848
849 fm.condwrite(ui.verbose and extsource, 'source',
849 fm.condwrite(ui.verbose and extsource, 'source',
850 _(' location: %s\n'), extsource or "")
850 _(' location: %s\n'), extsource or "")
851
851
852 if ui.verbose:
852 if ui.verbose:
853 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
853 fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
854 fm.data(bundled=isinternal)
854 fm.data(bundled=isinternal)
855
855
856 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
856 fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
857 _(' tested with: %s\n'),
857 _(' tested with: %s\n'),
858 fm.formatlist(exttestedwith, name='ver'))
858 fm.formatlist(exttestedwith, name='ver'))
859
859
860 fm.condwrite(ui.verbose and extbuglink, 'buglink',
860 fm.condwrite(ui.verbose and extbuglink, 'buglink',
861 _(' bug reporting: %s\n'), extbuglink or "")
861 _(' bug reporting: %s\n'), extbuglink or "")
862
862
863 fm.end()
863 fm.end()
864
864
865 @command('debugfileset',
865 @command('debugfileset',
866 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
866 [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
867 _('[-r REV] FILESPEC'))
867 _('[-r REV] FILESPEC'))
868 def debugfileset(ui, repo, expr, **opts):
868 def debugfileset(ui, repo, expr, **opts):
869 '''parse and apply a fileset specification'''
869 '''parse and apply a fileset specification'''
870 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
870 ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
871 if ui.verbose:
871 if ui.verbose:
872 tree = fileset.parse(expr)
872 tree = fileset.parse(expr)
873 ui.note(fileset.prettyformat(tree), "\n")
873 ui.note(fileset.prettyformat(tree), "\n")
874
874
875 for f in ctx.getfileset(expr):
875 for f in ctx.getfileset(expr):
876 ui.write("%s\n" % f)
876 ui.write("%s\n" % f)
877
877
878 @command('debugformat',
878 @command('debugformat',
879 [] + cmdutil.formatteropts,
879 [] + cmdutil.formatteropts,
880 _(''))
880 _(''))
881 def debugformat(ui, repo, **opts):
881 def debugformat(ui, repo, **opts):
882 """display format information about the current repository
882 """display format information about the current repository
883
883
884 Use --verbose to get extra information about current config value and
884 Use --verbose to get extra information about current config value and
885 Mercurial default."""
885 Mercurial default."""
886 opts = pycompat.byteskwargs(opts)
886 opts = pycompat.byteskwargs(opts)
887 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
887 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
888 maxvariantlength = max(len('format-variant'), maxvariantlength)
888 maxvariantlength = max(len('format-variant'), maxvariantlength)
889
889
890 def makeformatname(name):
890 def makeformatname(name):
891 return '%s:' + (' ' * (maxvariantlength - len(name)))
891 return '%s:' + (' ' * (maxvariantlength - len(name)))
892
892
893 fm = ui.formatter('debugformat', opts)
893 fm = ui.formatter('debugformat', opts)
894 if fm.isplain():
894 if fm.isplain():
895 def formatvalue(value):
895 def formatvalue(value):
896 if util.safehasattr(value, 'startswith'):
896 if util.safehasattr(value, 'startswith'):
897 return value
897 return value
898 if value:
898 if value:
899 return 'yes'
899 return 'yes'
900 else:
900 else:
901 return 'no'
901 return 'no'
902 else:
902 else:
903 formatvalue = pycompat.identity
903 formatvalue = pycompat.identity
904
904
905 fm.plain('format-variant')
905 fm.plain('format-variant')
906 fm.plain(' ' * (maxvariantlength - len('format-variant')))
906 fm.plain(' ' * (maxvariantlength - len('format-variant')))
907 fm.plain(' repo')
907 fm.plain(' repo')
908 if ui.verbose:
908 if ui.verbose:
909 fm.plain(' config default')
909 fm.plain(' config default')
910 fm.plain('\n')
910 fm.plain('\n')
911 for fv in upgrade.allformatvariant:
911 for fv in upgrade.allformatvariant:
912 fm.startitem()
912 fm.startitem()
913 repovalue = fv.fromrepo(repo)
913 repovalue = fv.fromrepo(repo)
914 configvalue = fv.fromconfig(repo)
914 configvalue = fv.fromconfig(repo)
915
915
916 if repovalue != configvalue:
916 if repovalue != configvalue:
917 namelabel = 'formatvariant.name.mismatchconfig'
917 namelabel = 'formatvariant.name.mismatchconfig'
918 repolabel = 'formatvariant.repo.mismatchconfig'
918 repolabel = 'formatvariant.repo.mismatchconfig'
919 elif repovalue != fv.default:
919 elif repovalue != fv.default:
920 namelabel = 'formatvariant.name.mismatchdefault'
920 namelabel = 'formatvariant.name.mismatchdefault'
921 repolabel = 'formatvariant.repo.mismatchdefault'
921 repolabel = 'formatvariant.repo.mismatchdefault'
922 else:
922 else:
923 namelabel = 'formatvariant.name.uptodate'
923 namelabel = 'formatvariant.name.uptodate'
924 repolabel = 'formatvariant.repo.uptodate'
924 repolabel = 'formatvariant.repo.uptodate'
925
925
926 fm.write('name', makeformatname(fv.name), fv.name,
926 fm.write('name', makeformatname(fv.name), fv.name,
927 label=namelabel)
927 label=namelabel)
928 fm.write('repo', ' %3s', formatvalue(repovalue),
928 fm.write('repo', ' %3s', formatvalue(repovalue),
929 label=repolabel)
929 label=repolabel)
930 if fv.default != configvalue:
930 if fv.default != configvalue:
931 configlabel = 'formatvariant.config.special'
931 configlabel = 'formatvariant.config.special'
932 else:
932 else:
933 configlabel = 'formatvariant.config.default'
933 configlabel = 'formatvariant.config.default'
934 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
934 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
935 label=configlabel)
935 label=configlabel)
936 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
936 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
937 label='formatvariant.default')
937 label='formatvariant.default')
938 fm.plain('\n')
938 fm.plain('\n')
939 fm.end()
939 fm.end()
940
940
941 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
941 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
942 def debugfsinfo(ui, path="."):
942 def debugfsinfo(ui, path="."):
943 """show information detected about current filesystem"""
943 """show information detected about current filesystem"""
944 ui.write(('path: %s\n') % path)
944 ui.write(('path: %s\n') % path)
945 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
945 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
946 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
946 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
947 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
947 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
948 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
948 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
949 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
949 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
950 casesensitive = '(unknown)'
950 casesensitive = '(unknown)'
951 try:
951 try:
952 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
952 with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
953 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
953 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
954 except OSError:
954 except OSError:
955 pass
955 pass
956 ui.write(('case-sensitive: %s\n') % casesensitive)
956 ui.write(('case-sensitive: %s\n') % casesensitive)
957
957
958 @command('debuggetbundle',
958 @command('debuggetbundle',
959 [('H', 'head', [], _('id of head node'), _('ID')),
959 [('H', 'head', [], _('id of head node'), _('ID')),
960 ('C', 'common', [], _('id of common node'), _('ID')),
960 ('C', 'common', [], _('id of common node'), _('ID')),
961 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
961 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
962 _('REPO FILE [-H|-C ID]...'),
962 _('REPO FILE [-H|-C ID]...'),
963 norepo=True)
963 norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # Assemble the keyword arguments for the wire-protocol getbundle() call.
    args = {}
    if common:
        args[r'common'] = [bin(nodehex) for nodehex in common]
    if head:
        args[r'heads'] = [bin(nodehex) for nodehex in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # Map the user-facing --type value to an internal bundle type name.
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
992
992
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No file arguments: dump the combined ignore matcher itself.
        ui.write("%s\n" % repr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # The file itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Otherwise an ignored ancestor directory may cover it.
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1034
1034
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    # Only two tabular layouts are supported: 0 (classic) and 1 (with flags).
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # With generaldelta the "base" column actually shows the delta parent.
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    if generaldelta:
        basehdr = ' delta'
    else:
        basehdr = '  base'

    # --debug prints full 40-char hashes; otherwise the short form.
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        # Width of one rendered node id; sample the first revision only.
        idlen = len(shortfn(r.node(i)))
        break

    # NOTE(review): the literal spacing in these headers must line up with
    # the "% 6d % 9d ..." row formats below; the viewer this chunk was
    # extracted from collapsed whitespace runs, so spacing is reconstructed
    # from upstream — confirm against the repository copy.
    if format == 0:
        ui.write(("   rev    offset  length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write(("   rev flag   offset   length"
                 "     size " + basehdr + "   link     p1     p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if generaldelta:
            base = r.deltaparent(i)
        else:
            base = r.chainbase(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # Fall back to null parents if the revlog cannot resolve them.
                pp = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), base, r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    base, r.linkrev(i), pr[0], pr[1], shortfn(node)))
1091
1091
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rlog = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rlog:
        # Emit one "parent -> child" edge per non-null parent.
        p1, p2 = rlog.parents(rlog.node(rev))
        ui.write("\t%d -> %d\n" % (rlog.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (rlog.rev(p2), rev))
    ui.write("}\n")
1106
1106
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # NOTE(review): writetemp is not called anywhere in this function body —
    # confirm it is still needed before removing.
    def writetemp(contents):
        # Create a uniquely-named temp file holding `contents`; caller owns it.
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, pycompat.sysstr("wb"))
        f.write(contents)
        f.close()
        return name

    # Count of hard failures; warnings below deliberately do not bump this.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS protocol / SNI support probed from the running Python build.
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are permitted: verify they actually import.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # Compression engines: all registered, those usable here, and those
    # advertisable over the wire protocol in the server role.
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                # p is reused as the "templates are healthy" flag below.
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = pycompat.shlexsplit(editor, posix=not pycompat.iswindows)[0]
    if pycompat.iswindows and editorbin[0] == '"' and editorbin[-1] == '"':
        # Strip the quoting Windows paths-with-spaces arrive with.
        editorbin = editorbin[1:-1]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = util.findexe(editorbin)
    # Distinguish "no editor configured and no vi" from "configured editor
    # missing": only the latter counts as a problem.
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    # Doubles as the command's exit code: 0 means a healthy install.
    return problems
1282
1282
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # One wire-protocol round trip for all ids, then one digit per id:
    # "1" if the node is known to the peer, "0" otherwise.
    flags = peer.known([bin(nodehex) for nodehex in ids])
    ui.write("%s\n" % "".join("1" if known else "0" for known in flags))
1296
1296
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Thin alias: all the work happens in debugnamecomplete().
    debugnamecomplete(ui, repo, *args)
1301
1301
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force mode: unconditionally delete the requested lock file(s) and exit.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) without waiting, hold them
    # until the user answers the prompt, then release in the finally block.
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode: describe each lock and return how many are held.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # Print one status line for lock `name`; return 1 if held, else 0.
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take it, so nobody else holds it; release immediately.
            l.release()
        else:
            try:
                stat = vfs.lstat(name)
                age = now - stat.st_mtime
                user = util.username(stat.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    # Lock contents are "host:pid"; elide the host when local.
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        # Lock file absent or acquirable: report it as free.
        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1398
1398
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # Render the null hash as the literal string 'null'.
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Pretty-print the raw record list for the given state version
        # (reads v1records/v2records from the enclosing scope).
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # Merge-driver record: NUL-separated driver name and state.
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # Per-file record; the v2 format stores two extra fields
                # (other-node and flags) that v1 lacks.
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # File-extras record: filename plus alternating key/value
                # pairs, all NUL-separated.
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # Labels record: up to three NUL-separated, possibly-empty
                # labels (local, other, base).
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # Known record types sort in 'LOml' order, unknowns after, by payload.
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    # Prefer v2 when both versions agree; otherwise fall back to v1 and,
    # with --verbose, also dump the mismatching v2 records.
    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1496 printrecords(2)
1497
1497
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # The 'branches' namespace is handled separately below so that only
    # open branches are offered for completion.
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # No argument means "complete everything".
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1517
1517
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # We deliberately avoid revsingle/revrange here so arbitrary
        # node identifiers, possibly not present locally, are accepted.
        try:
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        # --delete: remove the markers at the given indices.
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker obsoleting 'precursor'.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {'user': opts['user'] or ui.username()}
        succs = tuple(parsenodeid(succ) for succ in successors)
        with repo.lock():
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = util.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        # message fixed: was "cannot used --record-parents"
                        raise error.Abort('cannot use --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') % exc)
            finally:
                tr.release()
    else:
        # Display mode: list markers, optionally restricted to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            # When both --index and --rev are given, the index must be
            # computed over *all* markers, so iterate the full set and
            # display only the markers relevant to --rev.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1633
1633
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for 'path', restricted to
        # dirstate entries whose state letter is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate on other platforms
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        for f, st in dirstate.iteritems():
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                files.add(f)
                continue
            sep = f.find(pycompat.ossep, speclen)
            if sep >= 0:
                # only complete up to the next path segment
                dirs.add(f[:sep])
            else:
                files.add(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        # no state filter given means "accept everything"
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1698
1698
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging; the output only shows up
    # when --debug is in effect.
    overrides = {('devel', 'debug.peer-request'): True}

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        islocal = peer.local() is not None
        pushable = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if islocal else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if pushable else _('no')))
1717
1717
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    forcedtool = opts['tool']
    overrides = {}
    if forcedtool:
        overrides[('ui', 'forcemerge')] = forcedtool
        ui.note(('with --tool %r\n') % (forcedtool))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        envmerge = encoding.environ.get("HGMERGE")
        if envmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (envmerge))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (uimerge))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            try:
                # suppress _picktool's chatter unless --debug is given
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1796
1796
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # listing mode
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(k),
                                   util.escapestr(v)))
        return
    # update mode: conditionally set key to new if it currently is old
    key, old, new = keyinfo
    r = target.pushkey(namespace, key, old, new)
    ui.status(str(r) + '\n')
    return not r
1817
1817
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Compare the pvecs of two revisions and print their depths,
    # delta, hamming distance, and relation.
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    else:
        # Previously 'rel' was left unbound when none of the relations
        # matched, crashing with NameError in the write below.
        rel = "?"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1838
1838
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent '
                                  'with the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # tracked in the manifest but unknown to the dirstate
            manifestonly = inmanifest - indirstate
            # known to the dirstate only, excluding files marked as added
            dsnotadded = {f for f in indirstate - inmanifest
                          if dirstate[f] != 'a'}
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1876
1876
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper; all the work lives in the repair module.
    repair.rebuildfncache(ui, repo)
1881
1881
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for path in ctx.walk(matcher):
        fctx = ctx[path]
        renamed = fctx.filelog().renamed(fctx.filenode())
        rel = matcher.rel(path)
        if renamed:
            # renamed is a (source path, source filenode) pair
            ui.write(_("%s renamed from %s:%s\n")
                     % (rel, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1899
1899
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    if opts.get("dump"):
        # Raw index dump: one row per revision, no aggregate statistics.
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                 " rawsize totalsize compression heads chainlen\n"))
        ts = 0
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # A revision's parents stop being heads once the revision is seen.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    nummerges = 0
    numfull = 0
    numprev = 0
    nump1 = 0
    nump2 = 0
    numother = 0
    nump1prev = 0
    nump2prev = 0
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each size accumulator is [min, max, total]; min starts as None so the
    # first sample always wins.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one sample into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Guard the averages against an empty revlog (numrevs or numfull == 0),
    # which previously raised ZeroDivisionError.
    datasize[2] /= numrevs or 1
    fulltotal = fullsize[2]
    fullsize[2] /= numfull or 1
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / (numrevs or 1)
    # max() raises ValueError on an empty sequence; fall back to 0.
    maxchainlen = max(chainlengths or [0])
    maxchainspan = max(chainspans or [0])
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format wide enough for the largest expected value.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # (value, percentage-of-total); 100% when total is zero.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write(('    merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write(('    normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write(('    full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write(('    deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write(('    full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write(('    deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        if chunktype == 'empty':
            return '    %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return '    0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return '    0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write(('    where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                               numprev))
            ui.write(('    where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                               numprev))
            ui.write(('    other : ') + fmt2 % pcfmt(numoprev,
                                                     numprev))
        if gdelta:
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2130
2130
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Ordered parsing pipeline; each stage transforms the previous tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate both the analyzed and optimized trees and diff the results.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Render a unified-diff-style report of the mismatch.
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2233
2233
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    # Second parent defaults to the null revision when omitted.
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)
2251
2251
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    from . import win32

    # NOTE(review): ssl.wrap_socket is deprecated in favor of
    # SSLContext.wrap_socket on modern Pythons — confirm before modernizing.
    # Verification is intentionally disabled; we only want the peer's cert.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second call with build=True asks Windows Update to fetch the
            # missing intermediates/root.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2313
2313
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    """show subrepository state for a revision (path, source, revision)"""
    ctx = scmutil.revsingle(repo, rev, None)
    # substate maps subrepo path -> (source, revision, kind); print the
    # first two fields per entry in deterministic (sorted) order.
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source   %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])
2324
2324
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = bytes
    node2str = short
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n'% ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=opts[r'closest'],
                                               cache=cache):
            if succsset:
                # One indented line per successors set, nodes space-separated.
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2377
2377
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts[r'rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    props = {}
    for d in opts[r'define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            # 'ui' is reserved: it would shadow the resource of the same name.
            if not k or k == 'ui':
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with only the -D-defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        ui.write(t.render(props))
    else:
        # Log template: render once per requested changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        for r in revs:
            displayer.show(repo[r], **pycompat.strkwargs(props))
        displayer.close()
2426
2426
2427 @command('debugupdatecaches', [])
2427 @command('debugupdatecaches', [])
2428 def debugupdatecaches(ui, repo, *pats, **opts):
2428 def debugupdatecaches(ui, repo, *pats, **opts):
2429 """warm all known caches in the repository"""
2429 """warm all known caches in the repository"""
2430 with repo.wlock(), repo.lock():
2430 with repo.wlock(), repo.lock():
2431 repo.updatecaches()
2431 repo.updatecaches()
2432
2432
2433 @command('debugupgraderepo', [
2433 @command('debugupgraderepo', [
2434 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2434 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2435 ('', 'run', False, _('performs an upgrade')),
2435 ('', 'run', False, _('performs an upgrade')),
2436 ])
2436 ])
2437 def debugupgraderepo(ui, repo, run=False, optimize=None):
2437 def debugupgraderepo(ui, repo, run=False, optimize=None):
2438 """upgrade a repository to use different features
2438 """upgrade a repository to use different features
2439
2439
2440 If no arguments are specified, the repository is evaluated for upgrade
2440 If no arguments are specified, the repository is evaluated for upgrade
2441 and a list of problems and potential optimizations is printed.
2441 and a list of problems and potential optimizations is printed.
2442
2442
2443 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2443 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2444 can be influenced via additional arguments. More details will be provided
2444 can be influenced via additional arguments. More details will be provided
2445 by the command output when run without ``--run``.
2445 by the command output when run without ``--run``.
2446
2446
2447 During the upgrade, the repository will be locked and no writes will be
2447 During the upgrade, the repository will be locked and no writes will be
2448 allowed.
2448 allowed.
2449
2449
2450 At the end of the upgrade, the repository may not be readable while new
2450 At the end of the upgrade, the repository may not be readable while new
2451 repository data is swapped in. This window will be as long as it takes to
2451 repository data is swapped in. This window will be as long as it takes to
2452 rename some directories inside the ``.hg`` directory. On most machines, this
2452 rename some directories inside the ``.hg`` directory. On most machines, this
2453 should complete almost instantaneously and the chances of a consumer being
2453 should complete almost instantaneously and the chances of a consumer being
2454 unable to access the repository should be low.
2454 unable to access the repository should be low.
2455 """
2455 """
2456 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2456 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2457
2457
2458 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2458 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2459 inferrepo=True)
2459 inferrepo=True)
2460 def debugwalk(ui, repo, *pats, **opts):
2460 def debugwalk(ui, repo, *pats, **opts):
2461 """show how files match on given patterns"""
2461 """show how files match on given patterns"""
2462 opts = pycompat.byteskwargs(opts)
2462 opts = pycompat.byteskwargs(opts)
2463 m = scmutil.match(repo[None], pats, opts)
2463 m = scmutil.match(repo[None], pats, opts)
2464 ui.write(('matcher: %r\n' % m))
2464 ui.write(('matcher: %r\n' % m))
2465 items = list(repo[None].walk(m))
2465 items = list(repo[None].walk(m))
2466 if not items:
2466 if not items:
2467 return
2467 return
2468 f = lambda fn: fn
2468 f = lambda fn: fn
2469 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2469 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2470 f = lambda fn: util.normpath(fn)
2470 f = lambda fn: util.normpath(fn)
2471 fmt = 'f %%-%ds %%-%ds %%s' % (
2471 fmt = 'f %%-%ds %%-%ds %%s' % (
2472 max([len(abs) for abs in items]),
2472 max([len(abs) for abs in items]),
2473 max([len(m.rel(abs)) for abs in items]))
2473 max([len(m.rel(abs)) for abs in items]))
2474 for abs in items:
2474 for abs in items:
2475 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2475 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2476 ui.write("%s\n" % line.rstrip())
2476 ui.write("%s\n" % line.rstrip())
2477
2477
2478 @command('debugwireargs',
2478 @command('debugwireargs',
2479 [('', 'three', '', 'three'),
2479 [('', 'three', '', 'three'),
2480 ('', 'four', '', 'four'),
2480 ('', 'four', '', 'four'),
2481 ('', 'five', '', 'five'),
2481 ('', 'five', '', 'five'),
2482 ] + cmdutil.remoteopts,
2482 ] + cmdutil.remoteopts,
2483 _('REPO [OPTIONS]... [ONE [TWO]]'),
2483 _('REPO [OPTIONS]... [ONE [TWO]]'),
2484 norepo=True)
2484 norepo=True)
2485 def debugwireargs(ui, repopath, *vals, **opts):
2485 def debugwireargs(ui, repopath, *vals, **opts):
2486 opts = pycompat.byteskwargs(opts)
2486 opts = pycompat.byteskwargs(opts)
2487 repo = hg.peer(ui, opts, repopath)
2487 repo = hg.peer(ui, opts, repopath)
2488 for opt in cmdutil.remoteopts:
2488 for opt in cmdutil.remoteopts:
2489 del opts[opt[1]]
2489 del opts[opt[1]]
2490 args = {}
2490 args = {}
2491 for k, v in opts.iteritems():
2491 for k, v in opts.iteritems():
2492 if v:
2492 if v:
2493 args[k] = v
2493 args[k] = v
2494 args = pycompat.strkwargs(args)
2494 args = pycompat.strkwargs(args)
2495 # run twice to check that we don't mess up the stream for the next command
2495 # run twice to check that we don't mess up the stream for the next command
2496 res1 = repo.debugwireargs(*vals, **args)
2496 res1 = repo.debugwireargs(*vals, **args)
2497 res2 = repo.debugwireargs(*vals, **args)
2497 res2 = repo.debugwireargs(*vals, **args)
2498 ui.write("%s\n" % res1)
2498 ui.write("%s\n" % res1)
2499 if res1 != res2:
2499 if res1 != res2:
2500 ui.warn("%s\n" % res2)
2500 ui.warn("%s\n" % res2)
@@ -1,788 +1,788
1 # tags.py - read tag info from local repository
1 # tags.py - read tag info from local repository
2 #
2 #
3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009 Matt Mackall <mpm@selenic.com>
4 # Copyright 2009 Greg Ward <greg@gerg.ca>
4 # Copyright 2009 Greg Ward <greg@gerg.ca>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 # Currently this module only deals with reading and caching tags.
9 # Currently this module only deals with reading and caching tags.
10 # Eventually, it could take care of updating (adding/removing/moving)
10 # Eventually, it could take care of updating (adding/removing/moving)
11 # tags too.
11 # tags too.
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 import errno
15 import errno
16
16
17 from .node import (
17 from .node import (
18 bin,
18 bin,
19 hex,
19 hex,
20 nullid,
20 nullid,
21 short,
21 short,
22 )
22 )
23 from .i18n import _
23 from .i18n import _
24 from . import (
24 from . import (
25 encoding,
25 encoding,
26 error,
26 error,
27 match as matchmod,
27 match as matchmod,
28 scmutil,
28 scmutil,
29 util,
29 util,
30 )
30 )
31
31
32 # Tags computation can be expensive and caches exist to make it fast in
32 # Tags computation can be expensive and caches exist to make it fast in
33 # the common case.
33 # the common case.
34 #
34 #
35 # The "hgtagsfnodes1" cache file caches the .hgtags filenode values for
35 # The "hgtagsfnodes1" cache file caches the .hgtags filenode values for
36 # each revision in the repository. The file is effectively an array of
36 # each revision in the repository. The file is effectively an array of
37 # fixed length records. Read the docs for "hgtagsfnodescache" for technical
37 # fixed length records. Read the docs for "hgtagsfnodescache" for technical
38 # details.
38 # details.
39 #
39 #
40 # The .hgtags filenode cache grows in proportion to the length of the
40 # The .hgtags filenode cache grows in proportion to the length of the
41 # changelog. The file is truncated when the # changelog is stripped.
41 # changelog. The file is truncated when the # changelog is stripped.
42 #
42 #
43 # The purpose of the filenode cache is to avoid the most expensive part
43 # The purpose of the filenode cache is to avoid the most expensive part
44 # of finding global tags, which is looking up the .hgtags filenode in the
44 # of finding global tags, which is looking up the .hgtags filenode in the
45 # manifest for each head. This can take dozens or over 100ms for
45 # manifest for each head. This can take dozens or over 100ms for
46 # repositories with very large manifests. Multiplied by dozens or even
46 # repositories with very large manifests. Multiplied by dozens or even
47 # hundreds of heads and there is a significant performance concern.
47 # hundreds of heads and there is a significant performance concern.
48 #
48 #
49 # There also exist a separate cache file for each repository filter.
49 # There also exist a separate cache file for each repository filter.
50 # These "tags-*" files store information about the history of tags.
50 # These "tags-*" files store information about the history of tags.
51 #
51 #
52 # The tags cache files consists of a cache validation line followed by
52 # The tags cache files consists of a cache validation line followed by
53 # a history of tags.
53 # a history of tags.
54 #
54 #
55 # The cache validation line has the format:
55 # The cache validation line has the format:
56 #
56 #
57 # <tiprev> <tipnode> [<filteredhash>]
57 # <tiprev> <tipnode> [<filteredhash>]
58 #
58 #
59 # <tiprev> is an integer revision and <tipnode> is a 40 character hex
59 # <tiprev> is an integer revision and <tipnode> is a 40 character hex
60 # node for that changeset. These redundantly identify the repository
60 # node for that changeset. These redundantly identify the repository
61 # tip from the time the cache was written. In addition, <filteredhash>,
61 # tip from the time the cache was written. In addition, <filteredhash>,
62 # if present, is a 40 character hex hash of the contents of the filtered
62 # if present, is a 40 character hex hash of the contents of the filtered
63 # revisions for this filter. If the set of filtered revs changes, the
63 # revisions for this filter. If the set of filtered revs changes, the
64 # hash will change and invalidate the cache.
64 # hash will change and invalidate the cache.
65 #
65 #
66 # The history part of the tags cache consists of lines of the form:
66 # The history part of the tags cache consists of lines of the form:
67 #
67 #
68 # <node> <tag>
68 # <node> <tag>
69 #
69 #
70 # (This format is identical to that of .hgtags files.)
70 # (This format is identical to that of .hgtags files.)
71 #
71 #
72 # <tag> is the tag name and <node> is the 40 character hex changeset
72 # <tag> is the tag name and <node> is the 40 character hex changeset
73 # the tag is associated with.
73 # the tag is associated with.
74 #
74 #
75 # Tags are written sorted by tag name.
75 # Tags are written sorted by tag name.
76 #
76 #
77 # Tags associated with multiple changesets have an entry for each changeset.
77 # Tags associated with multiple changesets have an entry for each changeset.
78 # The most recent changeset (in terms of revlog ordering for the head
78 # The most recent changeset (in terms of revlog ordering for the head
79 # setting it) for each tag is last.
79 # setting it) for each tag is last.
80
80
81 def fnoderevs(ui, repo, revs):
81 def fnoderevs(ui, repo, revs):
82 """return the list of '.hgtags' fnodes used in a set revisions
82 """return the list of '.hgtags' fnodes used in a set revisions
83
83
84 This is returned as list of unique fnodes. We use a list instead of a set
84 This is returned as list of unique fnodes. We use a list instead of a set
85 because order matters when it comes to tags."""
85 because order matters when it comes to tags."""
86 unfi = repo.unfiltered()
86 unfi = repo.unfiltered()
87 tonode = unfi.changelog.node
87 tonode = unfi.changelog.node
88 nodes = [tonode(r) for r in revs]
88 nodes = [tonode(r) for r in revs]
89 fnodes = _getfnodes(ui, repo, nodes[::-1]) # reversed help the cache
89 fnodes = _getfnodes(ui, repo, nodes[::-1]) # reversed help the cache
90 fnodes = _filterfnodes(fnodes, nodes)
90 fnodes = _filterfnodes(fnodes, nodes)
91 return fnodes
91 return fnodes
92
92
93 def _nulltonone(value):
93 def _nulltonone(value):
94 """convert nullid to None
94 """convert nullid to None
95
95
96 For tag value, nullid means "deleted". This small utility function helps
96 For tag value, nullid means "deleted". This small utility function helps
97 translating that to None."""
97 translating that to None."""
98 if value == nullid:
98 if value == nullid:
99 return None
99 return None
100 return value
100 return value
101
101
102 def difftags(ui, repo, oldfnodes, newfnodes):
102 def difftags(ui, repo, oldfnodes, newfnodes):
103 """list differences between tags expressed in two set of file-nodes
103 """list differences between tags expressed in two set of file-nodes
104
104
105 The list contains entries in the form: (tagname, oldvalue, new value).
105 The list contains entries in the form: (tagname, oldvalue, new value).
106 None is used to expressed missing value:
106 None is used to expressed missing value:
107 ('foo', None, 'abcd') is a new tag,
107 ('foo', None, 'abcd') is a new tag,
108 ('bar', 'ef01', None) is a deletion,
108 ('bar', 'ef01', None) is a deletion,
109 ('baz', 'abcd', 'ef01') is a tag movement.
109 ('baz', 'abcd', 'ef01') is a tag movement.
110 """
110 """
111 if oldfnodes == newfnodes:
111 if oldfnodes == newfnodes:
112 return []
112 return []
113 oldtags = _tagsfromfnodes(ui, repo, oldfnodes)
113 oldtags = _tagsfromfnodes(ui, repo, oldfnodes)
114 newtags = _tagsfromfnodes(ui, repo, newfnodes)
114 newtags = _tagsfromfnodes(ui, repo, newfnodes)
115
115
116 # list of (tag, old, new): None means missing
116 # list of (tag, old, new): None means missing
117 entries = []
117 entries = []
118 for tag, (new, __) in newtags.items():
118 for tag, (new, __) in newtags.items():
119 new = _nulltonone(new)
119 new = _nulltonone(new)
120 old, __ = oldtags.pop(tag, (None, None))
120 old, __ = oldtags.pop(tag, (None, None))
121 old = _nulltonone(old)
121 old = _nulltonone(old)
122 if old != new:
122 if old != new:
123 entries.append((tag, old, new))
123 entries.append((tag, old, new))
124 # handle deleted tags
124 # handle deleted tags
125 for tag, (old, __) in oldtags.items():
125 for tag, (old, __) in oldtags.items():
126 old = _nulltonone(old)
126 old = _nulltonone(old)
127 if old is not None:
127 if old is not None:
128 entries.append((tag, old, None))
128 entries.append((tag, old, None))
129 entries.sort()
129 entries.sort()
130 return entries
130 return entries
131
131
132 def writediff(fp, difflist):
132 def writediff(fp, difflist):
133 """write tags diff information to a file.
133 """write tags diff information to a file.
134
134
135 Data are stored with a line based format:
135 Data are stored with a line based format:
136
136
137 <action> <hex-node> <tag-name>\n
137 <action> <hex-node> <tag-name>\n
138
138
139 Action are defined as follow:
139 Action are defined as follow:
140 -R tag is removed,
140 -R tag is removed,
141 +A tag is added,
141 +A tag is added,
142 -M tag is moved (old value),
142 -M tag is moved (old value),
143 +M tag is moved (new value),
143 +M tag is moved (new value),
144
144
145 Example:
145 Example:
146
146
147 +A 875517b4806a848f942811a315a5bce30804ae85 t5
147 +A 875517b4806a848f942811a315a5bce30804ae85 t5
148
148
149 See documentation of difftags output for details about the input.
149 See documentation of difftags output for details about the input.
150 """
150 """
151 add = '+A %s %s\n'
151 add = '+A %s %s\n'
152 remove = '-R %s %s\n'
152 remove = '-R %s %s\n'
153 updateold = '-M %s %s\n'
153 updateold = '-M %s %s\n'
154 updatenew = '+M %s %s\n'
154 updatenew = '+M %s %s\n'
155 for tag, old, new in difflist:
155 for tag, old, new in difflist:
156 # translate to hex
156 # translate to hex
157 if old is not None:
157 if old is not None:
158 old = hex(old)
158 old = hex(old)
159 if new is not None:
159 if new is not None:
160 new = hex(new)
160 new = hex(new)
161 # write to file
161 # write to file
162 if old is None:
162 if old is None:
163 fp.write(add % (new, tag))
163 fp.write(add % (new, tag))
164 elif new is None:
164 elif new is None:
165 fp.write(remove % (old, tag))
165 fp.write(remove % (old, tag))
166 else:
166 else:
167 fp.write(updateold % (old, tag))
167 fp.write(updateold % (old, tag))
168 fp.write(updatenew % (new, tag))
168 fp.write(updatenew % (new, tag))
169
169
170 def findglobaltags(ui, repo):
170 def findglobaltags(ui, repo):
171 '''Find global tags in a repo: return a tagsmap
171 '''Find global tags in a repo: return a tagsmap
172
172
173 tagsmap: tag name to (node, hist) 2-tuples.
173 tagsmap: tag name to (node, hist) 2-tuples.
174
174
175 The tags cache is read and updated as a side-effect of calling.
175 The tags cache is read and updated as a side-effect of calling.
176 '''
176 '''
177 (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
177 (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
178 if cachetags is not None:
178 if cachetags is not None:
179 assert not shouldwrite
179 assert not shouldwrite
180 # XXX is this really 100% correct? are there oddball special
180 # XXX is this really 100% correct? are there oddball special
181 # cases where a global tag should outrank a local tag but won't,
181 # cases where a global tag should outrank a local tag but won't,
182 # because cachetags does not contain rank info?
182 # because cachetags does not contain rank info?
183 alltags = {}
183 alltags = {}
184 _updatetags(cachetags, alltags)
184 _updatetags(cachetags, alltags)
185 return alltags
185 return alltags
186
186
187 for head in reversed(heads): # oldest to newest
187 for head in reversed(heads): # oldest to newest
188 assert head in repo.changelog.nodemap, \
188 assert head in repo.changelog.nodemap, \
189 "tag cache returned bogus head %s" % short(head)
189 "tag cache returned bogus head %s" % short(head)
190 fnodes = _filterfnodes(tagfnode, reversed(heads))
190 fnodes = _filterfnodes(tagfnode, reversed(heads))
191 alltags = _tagsfromfnodes(ui, repo, fnodes)
191 alltags = _tagsfromfnodes(ui, repo, fnodes)
192
192
193 # and update the cache (if necessary)
193 # and update the cache (if necessary)
194 if shouldwrite:
194 if shouldwrite:
195 _writetagcache(ui, repo, valid, alltags)
195 _writetagcache(ui, repo, valid, alltags)
196 return alltags
196 return alltags
197
197
198 def _filterfnodes(tagfnode, nodes):
198 def _filterfnodes(tagfnode, nodes):
199 """return a list of unique fnodes
199 """return a list of unique fnodes
200
200
201 The order of this list matches the order of "nodes". Preserving this order
201 The order of this list matches the order of "nodes". Preserving this order
202 is important as reading tags in different order provides different
202 is important as reading tags in different order provides different
203 results."""
203 results."""
204 seen = set() # set of fnode
204 seen = set() # set of fnode
205 fnodes = []
205 fnodes = []
206 for no in nodes: # oldest to newest
206 for no in nodes: # oldest to newest
207 fnode = tagfnode.get(no)
207 fnode = tagfnode.get(no)
208 if fnode and fnode not in seen:
208 if fnode and fnode not in seen:
209 seen.add(fnode)
209 seen.add(fnode)
210 fnodes.append(fnode)
210 fnodes.append(fnode)
211 return fnodes
211 return fnodes
212
212
213 def _tagsfromfnodes(ui, repo, fnodes):
213 def _tagsfromfnodes(ui, repo, fnodes):
214 """return a tagsmap from a list of file-node
214 """return a tagsmap from a list of file-node
215
215
216 tagsmap: tag name to (node, hist) 2-tuples.
216 tagsmap: tag name to (node, hist) 2-tuples.
217
217
218 The order of the list matters."""
218 The order of the list matters."""
219 alltags = {}
219 alltags = {}
220 fctx = None
220 fctx = None
221 for fnode in fnodes:
221 for fnode in fnodes:
222 if fctx is None:
222 if fctx is None:
223 fctx = repo.filectx('.hgtags', fileid=fnode)
223 fctx = repo.filectx('.hgtags', fileid=fnode)
224 else:
224 else:
225 fctx = fctx.filectx(fnode)
225 fctx = fctx.filectx(fnode)
226 filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
226 filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
227 _updatetags(filetags, alltags)
227 _updatetags(filetags, alltags)
228 return alltags
228 return alltags
229
229
230 def readlocaltags(ui, repo, alltags, tagtypes):
230 def readlocaltags(ui, repo, alltags, tagtypes):
231 '''Read local tags in repo. Update alltags and tagtypes.'''
231 '''Read local tags in repo. Update alltags and tagtypes.'''
232 try:
232 try:
233 data = repo.vfs.read("localtags")
233 data = repo.vfs.read("localtags")
234 except IOError as inst:
234 except IOError as inst:
235 if inst.errno != errno.ENOENT:
235 if inst.errno != errno.ENOENT:
236 raise
236 raise
237 return
237 return
238
238
239 # localtags is in the local encoding; re-encode to UTF-8 on
239 # localtags is in the local encoding; re-encode to UTF-8 on
240 # input for consistency with the rest of this module.
240 # input for consistency with the rest of this module.
241 filetags = _readtags(
241 filetags = _readtags(
242 ui, repo, data.splitlines(), "localtags",
242 ui, repo, data.splitlines(), "localtags",
243 recode=encoding.fromlocal)
243 recode=encoding.fromlocal)
244
244
245 # remove tags pointing to invalid nodes
245 # remove tags pointing to invalid nodes
246 cl = repo.changelog
246 cl = repo.changelog
247 for t in list(filetags):
247 for t in list(filetags):
248 try:
248 try:
249 cl.rev(filetags[t][0])
249 cl.rev(filetags[t][0])
250 except (LookupError, ValueError):
250 except (LookupError, ValueError):
251 del filetags[t]
251 del filetags[t]
252
252
253 _updatetags(filetags, alltags, 'local', tagtypes)
253 _updatetags(filetags, alltags, 'local', tagtypes)
254
254
255 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
255 def _readtaghist(ui, repo, lines, fn, recode=None, calcnodelines=False):
256 '''Read tag definitions from a file (or any source of lines).
256 '''Read tag definitions from a file (or any source of lines).
257
257
258 This function returns two sortdicts with similar information:
258 This function returns two sortdicts with similar information:
259
259
260 - the first dict, bintaghist, contains the tag information as expected by
260 - the first dict, bintaghist, contains the tag information as expected by
261 the _readtags function, i.e. a mapping from tag name to (node, hist):
261 the _readtags function, i.e. a mapping from tag name to (node, hist):
262 - node is the node id from the last line read for that name,
262 - node is the node id from the last line read for that name,
263 - hist is the list of node ids previously associated with it (in file
263 - hist is the list of node ids previously associated with it (in file
264 order). All node ids are binary, not hex.
264 order). All node ids are binary, not hex.
265
265
266 - the second dict, hextaglines, is a mapping from tag name to a list of
266 - the second dict, hextaglines, is a mapping from tag name to a list of
267 [hexnode, line number] pairs, ordered from the oldest to the newest node.
267 [hexnode, line number] pairs, ordered from the oldest to the newest node.
268
268
269 When calcnodelines is False the hextaglines dict is not calculated (an
269 When calcnodelines is False the hextaglines dict is not calculated (an
270 empty dict is returned). This is done to improve this function's
270 empty dict is returned). This is done to improve this function's
271 performance in cases where the line numbers are not needed.
271 performance in cases where the line numbers are not needed.
272 '''
272 '''
273
273
274 bintaghist = util.sortdict()
274 bintaghist = util.sortdict()
275 hextaglines = util.sortdict()
275 hextaglines = util.sortdict()
276 count = 0
276 count = 0
277
277
278 def dbg(msg):
278 def dbg(msg):
279 ui.debug("%s, line %s: %s\n" % (fn, count, msg))
279 ui.debug("%s, line %d: %s\n" % (fn, count, msg))
280
280
281 for nline, line in enumerate(lines):
281 for nline, line in enumerate(lines):
282 count += 1
282 count += 1
283 if not line:
283 if not line:
284 continue
284 continue
285 try:
285 try:
286 (nodehex, name) = line.split(" ", 1)
286 (nodehex, name) = line.split(" ", 1)
287 except ValueError:
287 except ValueError:
288 dbg("cannot parse entry")
288 dbg("cannot parse entry")
289 continue
289 continue
290 name = name.strip()
290 name = name.strip()
291 if recode:
291 if recode:
292 name = recode(name)
292 name = recode(name)
293 try:
293 try:
294 nodebin = bin(nodehex)
294 nodebin = bin(nodehex)
295 except TypeError:
295 except TypeError:
296 dbg("node '%s' is not well formed" % nodehex)
296 dbg("node '%s' is not well formed" % nodehex)
297 continue
297 continue
298
298
299 # update filetags
299 # update filetags
300 if calcnodelines:
300 if calcnodelines:
301 # map tag name to a list of line numbers
301 # map tag name to a list of line numbers
302 if name not in hextaglines:
302 if name not in hextaglines:
303 hextaglines[name] = []
303 hextaglines[name] = []
304 hextaglines[name].append([nodehex, nline])
304 hextaglines[name].append([nodehex, nline])
305 continue
305 continue
306 # map tag name to (node, hist)
306 # map tag name to (node, hist)
307 if name not in bintaghist:
307 if name not in bintaghist:
308 bintaghist[name] = []
308 bintaghist[name] = []
309 bintaghist[name].append(nodebin)
309 bintaghist[name].append(nodebin)
310 return bintaghist, hextaglines
310 return bintaghist, hextaglines
311
311
312 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
312 def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False):
313 '''Read tag definitions from a file (or any source of lines).
313 '''Read tag definitions from a file (or any source of lines).
314
314
315 Returns a mapping from tag name to (node, hist).
315 Returns a mapping from tag name to (node, hist).
316
316
317 "node" is the node id from the last line read for that name. "hist"
317 "node" is the node id from the last line read for that name. "hist"
318 is the list of node ids previously associated with it (in file order).
318 is the list of node ids previously associated with it (in file order).
319 All node ids are binary, not hex.
319 All node ids are binary, not hex.
320 '''
320 '''
321 filetags, nodelines = _readtaghist(ui, repo, lines, fn, recode=recode,
321 filetags, nodelines = _readtaghist(ui, repo, lines, fn, recode=recode,
322 calcnodelines=calcnodelines)
322 calcnodelines=calcnodelines)
323 # util.sortdict().__setitem__ is much slower at replacing then inserting
323 # util.sortdict().__setitem__ is much slower at replacing then inserting
324 # new entries. The difference can matter if there are thousands of tags.
324 # new entries. The difference can matter if there are thousands of tags.
325 # Create a new sortdict to avoid the performance penalty.
325 # Create a new sortdict to avoid the performance penalty.
326 newtags = util.sortdict()
326 newtags = util.sortdict()
327 for tag, taghist in filetags.items():
327 for tag, taghist in filetags.items():
328 newtags[tag] = (taghist[-1], taghist[:-1])
328 newtags[tag] = (taghist[-1], taghist[:-1])
329 return newtags
329 return newtags
330
330
331 def _updatetags(filetags, alltags, tagtype=None, tagtypes=None):
331 def _updatetags(filetags, alltags, tagtype=None, tagtypes=None):
332 """Incorporate the tag info read from one file into dictionnaries
332 """Incorporate the tag info read from one file into dictionnaries
333
333
334 The first one, 'alltags', is a "tagmaps" (see 'findglobaltags' for details).
334 The first one, 'alltags', is a "tagmaps" (see 'findglobaltags' for details).
335
335
336 The second one, 'tagtypes', is optional and will be updated to track the
336 The second one, 'tagtypes', is optional and will be updated to track the
337 "tagtype" of entries in the tagmaps. When set, the 'tagtype' argument also
337 "tagtype" of entries in the tagmaps. When set, the 'tagtype' argument also
338 needs to be set."""
338 needs to be set."""
339 if tagtype is None:
339 if tagtype is None:
340 assert tagtypes is None
340 assert tagtypes is None
341
341
342 for name, nodehist in filetags.iteritems():
342 for name, nodehist in filetags.iteritems():
343 if name not in alltags:
343 if name not in alltags:
344 alltags[name] = nodehist
344 alltags[name] = nodehist
345 if tagtype is not None:
345 if tagtype is not None:
346 tagtypes[name] = tagtype
346 tagtypes[name] = tagtype
347 continue
347 continue
348
348
349 # we prefer alltags[name] if:
349 # we prefer alltags[name] if:
350 # it supersedes us OR
350 # it supersedes us OR
351 # mutual supersedes and it has a higher rank
351 # mutual supersedes and it has a higher rank
352 # otherwise we win because we're tip-most
352 # otherwise we win because we're tip-most
353 anode, ahist = nodehist
353 anode, ahist = nodehist
354 bnode, bhist = alltags[name]
354 bnode, bhist = alltags[name]
355 if (bnode != anode and anode in bhist and
355 if (bnode != anode and anode in bhist and
356 (bnode not in ahist or len(bhist) > len(ahist))):
356 (bnode not in ahist or len(bhist) > len(ahist))):
357 anode = bnode
357 anode = bnode
358 elif tagtype is not None:
358 elif tagtype is not None:
359 tagtypes[name] = tagtype
359 tagtypes[name] = tagtype
360 ahist.extend([n for n in bhist if n not in ahist])
360 ahist.extend([n for n in bhist if n not in ahist])
361 alltags[name] = anode, ahist
361 alltags[name] = anode, ahist
362
362
def _filename(repo):
    """Return the name of the tag cache file for a repo or repoview.

    Unfiltered repos use the plain 'tags2' name; filtered views get a
    per-filter suffix so each view keeps its own cache.
    """
    base = 'tags2'
    if not repo.filtername:
        return base
    return '%s-%s' % (base, repo.filtername)
369
369
def _readtagcache(ui, repo):
    '''Read the tag cache.

    Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite).

    If the cache is completely up-to-date, "cachetags" is a dict of the
    form returned by _readtags() and "heads", "fnodes", and "validinfo" are
    None and "shouldwrite" is False.

    If the cache is not up to date, "cachetags" is None. "heads" is a list
    of all heads currently in the repository, ordered from tip to oldest.
    "validinfo" is a tuple describing cache validation info. This is used
    when writing the tags cache. "fnodes" is a mapping from head to .hgtags
    filenode. "shouldwrite" is True.

    If the cache is not up to date, the caller is responsible for reading tag
    info from each returned head. (See findglobaltags().)
    '''
    try:
        cachefile = repo.cachevfs(_filename(repo), 'r')
        # force reading the file for static-http
        cachelines = iter(cachefile)
    except IOError:
        # No cache file (or unreadable): leave cachefile as None so the
        # validation below fails and the tags are recomputed.
        cachefile = None

    # The first line of the cache records (tip rev, tip node[, filtered
    # hash]); it is parsed here and compared against the live repo below.
    cacherev = None
    cachenode = None
    cachehash = None
    if cachefile:
        try:
            validline = next(cachelines)
            validline = validline.split()
            cacherev = int(validline[0])
            cachenode = bin(validline[1])
            if len(validline) > 2:
                cachehash = bin(validline[2])
        except Exception:
            # corruption of the cache, just recompute it.
            pass

    tipnode = repo.changelog.tip()
    tiprev = len(repo.changelog) - 1

    # Case 1 (common): tip is the same, so nothing has changed.
    # (Unchanged tip trivially means no changesets have been added.
    # But, thanks to localrepository.destroyed(), it also means none
    # have been destroyed by strip or rollback.)
    if (cacherev == tiprev
            and cachenode == tipnode
            and cachehash == scmutil.filteredhash(repo, tiprev)):
        # Cache is valid: the remaining lines hold the tags themselves.
        tags = _readtags(ui, repo, cachelines, cachefile.name)
        cachefile.close()
        return (None, None, None, tags, False)
    if cachefile:
        cachefile.close() # ignore rest of file

    # Validation info to be written back out by the caller.
    valid = (tiprev, tipnode, scmutil.filteredhash(repo, tiprev))

    repoheads = repo.heads()
    # Case 2 (uncommon): empty repo; get out quickly and don't bother
    # writing an empty cache.
    if repoheads == [nullid]:
        return ([], {}, valid, {}, False)

    # Case 3 (uncommon): cache file missing or empty.

    # Case 4 (uncommon): tip rev decreased. This should only happen
    # when we're called from localrepository.destroyed(). Refresh the
    # cache so future invocations will not see disappeared heads in the
    # cache.

    # Case 5 (common): tip has changed, so we've added/replaced heads.

    # As it happens, the code to handle cases 3, 4, 5 is the same.

    # N.B. in case 4 (nodes destroyed), "new head" really means "newly
    # exposed".
    if not len(repo.file('.hgtags')):
        # No tags have ever been committed, so we can avoid a
        # potentially expensive search.
        return ([], {}, valid, None, True)


    # Now we have to lookup the .hgtags filenode for every new head.
    # This is the most expensive part of finding tags, so performance
    # depends primarily on the size of newheads. Worst case: no cache
    # file, so newheads == repoheads.
    cachefnode = _getfnodes(ui, repo, repoheads)

    # Caller has to iterate over all heads, but can use the filenodes in
    # cachefnode to get to each .hgtags revision quickly.
    return (repoheads, cachefnode, valid, None, True)
462
462
def _getfnodes(ui, repo, nodes):
    """Return the .hgtags filenode for each given changeset node.

    The result is a {node: fnode} mapping; nodes whose revision has no
    '.hgtags' file are left out. Cache hit/lookup statistics and the
    elapsed time are logged under 'tagscache'.
    """
    start = util.timer()
    cache = hgtagsfnodescache(repo.unfiltered())
    result = {}
    # Walk from oldest to newest so the fnode cache is filled in order.
    for n in reversed(nodes):
        fnode = cache.getfnode(n)
        if fnode == nullid:
            continue
        result[n] = fnode

    cache.write()

    elapsed = util.timer() - start
    ui.log('tagscache',
           '%d/%d cache hits/lookups in %0.4f seconds\n',
           cache.hitcount, cache.lookupcount, elapsed)
    return result
485
485
def _writetagcache(ui, repo, valid, cachetags):
    """Write computed tags and validation info to the on-disk cache.

    Failures to open or close the cache file are silently ignored: the
    cache is an optimization and will simply be recomputed next time.
    """
    cachename = _filename(repo)
    try:
        fp = repo.cachevfs(cachename, 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing .hg/cache/%s with %d tags\n',
           cachename, len(cachetags))

    # First line: "<tiprev> <tipnode> [<filteredhash>]" used for validation.
    tiprev, tipnode, fhash = valid[0], valid[1], valid[2]
    if fhash:
        fp.write('%d %s %s\n' % (tiprev, hex(tipnode), hex(fhash)))
    else:
        fp.write('%d %s\n' % (tiprev, hex(tipnode)))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module. If we converted
    # them local encoding on input, we would lose info writing them to
    # the cache.
    for name, (node, hist) in sorted(cachetags.iteritems()):
        for histnode in hist:
            fp.write("%s %s\n" % (hex(histnode), name))
        fp.write("%s %s\n" % (hex(node), name))

    try:
        fp.close()
    except (OSError, IOError):
        pass
514
514
def tag(repo, names, node, message, local, user, date, editor=False):
    '''Attach one or more symbolic names to a revision.

    names may be a single string or, to add several tags at once, a list
    of strings.

    When local is True the tags are stored in a per-repository file;
    otherwise they are appended to the .hgtags file and a new changeset
    is committed with that change.

    keyword arguments:

    local: whether to store tags in non-version-controlled file
    (default False)

    message: commit message to use if committing

    user: name of user to use if committing

    date: date tuple to use if committing'''

    if not local:
        # Refuse to run with a dirty .hgtags: the commit below would
        # otherwise sweep unrelated pending edits in with the new tag.
        m = matchmod.exact(repo.root, '', ['.hgtags'])
        if any(repo.status(match=m, unknown=True, ignored=True)):
            raise error.Abort(_('working copy of .hgtags is changed'),
                              hint=_('please commit .hgtags manually'))

    with repo.wlock():
        repo.tags()  # instantiate the cache
        _tag(repo, names, node, message, local, user, date, editor=editor)
546
546
def _tag(repo, names, node, message, local, user, date, extra=None,
         editor=False):
    """Worker for tag(): record the given names for node.

    Runs the 'pretag' hook for each name, appends the tag lines to either
    the localtags file or the versioned .hgtags file, and (for non-local
    tags) commits the .hgtags change. Returns the node of the tag commit,
    or None for local tags. Callers are expected to hold the wlock (see
    tag()).
    """
    # Normalize the single-string form to a tuple of names.
    if isinstance(names, str):
        names = (names,)

    branches = repo.branchmap()
    for name in names:
        # 'pretag' may veto the operation by raising (throw=True).
        repo.hook('pretag', throw=True, node=hex(node), tag=name,
                  local=local)
        if name in branches:
            repo.ui.warn(_("warning: tag %s conflicts with existing"
                           " branch name\n") % name)

    def writetags(fp, names, munge, prevtags):
        # Append tag lines at EOF; munge (if given) converts each name
        # for storage (e.g. encoding.fromlocal for committed tags).
        fp.seek(0, 2)
        # Make sure we start on a fresh line if the file doesn't end
        # with a newline.
        if prevtags and prevtags[-1] != '\n':
            fp.write('\n')
        for name in names:
            if munge:
                m = munge(name)
            else:
                m = name

            if (repo._tagscache.tagtypes and
                name in repo._tagscache.tagtypes):
                # Re-record the previous value so the new entry cleanly
                # supersedes it in the tag history.
                old = repo.tags().get(name, nullid)
                fp.write('%s %s\n' % (hex(old), m))
            fp.write('%s %s\n' % (hex(node), m))
        fp.close()

    prevtags = ''
    if local:
        try:
            fp = repo.vfs('localtags', 'r+')
        except IOError:
            # No localtags yet: create it in append mode.
            fp = repo.vfs('localtags', 'a')
        else:
            prevtags = fp.read()

        # local tags are stored in the current charset
        writetags(fp, names, None, prevtags)
        for name in names:
            repo.hook('tag', node=hex(node), tag=name, local=local)
        # Local tags produce no commit; nothing more to do.
        return

    try:
        fp = repo.wvfs('.hgtags', 'rb+')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        fp = repo.wvfs('.hgtags', 'ab')
    else:
        prevtags = fp.read()

    # committed tags are stored in UTF-8
    writetags(fp, names, encoding.fromlocal, prevtags)

    fp.close()

    repo.invalidatecaches()

    # Ensure .hgtags is tracked before committing it.
    if '.hgtags' not in repo.dirstate:
        repo[None].add(['.hgtags'])

    # Commit only the .hgtags change, nothing else in the working copy.
    m = matchmod.exact(repo.root, '', ['.hgtags'])
    tagnode = repo.commit(message, user, date, extra=extra, match=m,
                          editor=editor)

    for name in names:
        repo.hook('tag', node=hex(node), tag=name, local=local)

    return tagnode
619
619
# Filename (under .hg/cache/) of the persistent .hgtags-filenode cache.
_fnodescachefile = 'hgtagsfnodes1'
# Fixed record size: 4-byte changeset-node prefix + 20-byte .hgtags filenode.
_fnodesrecsize = 4 + 20 # changeset fragment + filenode
# Sentinel record meaning "entry never computed" (matches the 0xff fill
# used when extending the cache in hgtagsfnodescache.__init__).
_fnodesmissingrec = '\xff' * 24
623
623
class hgtagsfnodescache(object):
    """Persistent cache mapping revisions to .hgtags filenodes.

    The cache is an array of records. Each item in the array corresponds to
    a changelog revision. Values in the array contain the first 4 bytes of
    the node hash and the 20 bytes .hgtags filenode for that revision.

    The first 4 bytes are present as a form of verification. Repository
    stripping and rewriting may change the node at a numeric revision in the
    changelog. The changeset fragment serves as a verifier to detect
    rewriting. This logic is shared with the rev branch cache (see
    branchmap.py).

    The instance holds in memory the full cache content but entries are
    only parsed on read.

    Instances behave like lists. ``c[i]`` works where i is a rev or
    changeset node. Missing indexes are populated automatically on access.
    """
    def __init__(self, repo):
        # The cache is indexed by unfiltered revision number, so a
        # filtered repoview must never be used here.
        assert repo.filtername is None

        self._repo = repo

        # Only for reporting purposes.
        self.lookupcount = 0
        self.hitcount = 0


        try:
            data = repo.cachevfs.read(_fnodescachefile)
        except (OSError, IOError):
            # Missing/unreadable cache file: start from an empty buffer.
            data = ""
        self._raw = bytearray(data)

        # The end state of self._raw is an array that is of the exact length
        # required to hold a record for every revision in the repository.
        # We truncate or extend the array as necessary. self._dirtyoffset is
        # defined to be the start offset at which we need to write the output
        # file. This offset is also adjusted when new entries are calculated
        # for array members.
        cllen = len(repo.changelog)
        wantedlen = cllen * _fnodesrecsize
        rawlen = len(self._raw)

        self._dirtyoffset = None

        if rawlen < wantedlen:
            # Pad with the 0xff "missing" sentinel; everything from the
            # old end onwards must eventually be written out.
            self._dirtyoffset = rawlen
            self._raw.extend('\xff' * (wantedlen - rawlen))
        elif rawlen > wantedlen:
            # There's no easy way to truncate array instances. This seems
            # slightly less evil than copying a potentially large array slice.
            for i in range(rawlen - wantedlen):
                self._raw.pop()
            self._dirtyoffset = len(self._raw)

    def getfnode(self, node, computemissing=True):
        """Obtain the filenode of the .hgtags file at a specified revision.

        If the value is in the cache, the entry will be validated and returned.
        Otherwise, the filenode will be computed and returned unless
        "computemissing" is False, in which case None will be returned without
        any potentially expensive computation being performed.

        If an .hgtags does not exist at the specified revision, nullid is
        returned.
        """
        ctx = self._repo[node]
        rev = ctx.rev()

        self.lookupcount += 1

        offset = rev * _fnodesrecsize
        # NOTE(review): '%s' % bytearray yields the raw bytes on Python 2
        # only; on Python 3 it would produce "bytearray(b'...')" -- confirm
        # against the py3-porting effort this file is part of.
        record = '%s' % self._raw[offset:offset + _fnodesrecsize]
        properprefix = node[0:4]

        # Validate and return existing entry.
        if record != _fnodesmissingrec:
            fileprefix = record[0:4]

            # The stored 4-byte prefix must match this node, otherwise the
            # revision was rewritten (e.g. strip) and the entry is stale.
            if fileprefix == properprefix:
                self.hitcount += 1
                return record[4:]

            # Fall through.

        # If we get here, the entry is either missing or invalid.

        if not computemissing:
            return None

        # Populate missing entry.
        try:
            fnode = ctx.filenode('.hgtags')
        except error.LookupError:
            # No .hgtags file on this revision.
            fnode = nullid

        self._writeentry(offset, properprefix, fnode)
        return fnode

    def setfnode(self, node, fnode):
        """Set the .hgtags filenode for a given changeset."""
        assert len(fnode) == 20
        ctx = self._repo[node]

        # Do a lookup first to avoid writing if nothing has changed.
        if self.getfnode(ctx.node(), computemissing=False) == fnode:
            return

        self._writeentry(ctx.rev() * _fnodesrecsize, node[0:4], fnode)

    def _writeentry(self, offset, prefix, fnode):
        # Store one 24-byte record (4-byte node prefix + 20-byte fnode)
        # at the given byte offset and mark the buffer dirty from there.
        # Slices on array instances only accept other array.
        entry = bytearray(prefix + fnode)
        self._raw[offset:offset + _fnodesrecsize] = entry
        # self._dirtyoffset could be None.
        # NOTE(review): when _dirtyoffset is None this forces it to 0 (full
        # rewrite), and 'offset or 0' is a no-op for an int offset -- the
        # intent is min(existing-dirty, offset); confirm.
        self._dirtyoffset = min(self._dirtyoffset or 0, offset or 0)

    def write(self):
        """Perform all necessary writes to cache file.

        This may no-op if no writes are needed or if a write lock could
        not be obtained.
        """
        if self._dirtyoffset is None:
            return

        data = self._raw[self._dirtyoffset:]
        if not data:
            return

        repo = self._repo

        # Best effort: never block on the lock just to persist a cache.
        try:
            lock = repo.wlock(wait=False)
        except error.LockError:
            repo.ui.log('tagscache', 'not writing .hg/cache/%s because '
                        'lock cannot be acquired\n' % (_fnodescachefile))
            return

        try:
            f = repo.cachevfs.open(_fnodescachefile, 'ab')
            try:
                # if the file has been truncated
                actualoffset = f.tell()
                if actualoffset < self._dirtyoffset:
                    # File is shorter than expected: rewind the dirty
                    # offset so we rewrite everything from the real EOF.
                    self._dirtyoffset = actualoffset
                    data = self._raw[self._dirtyoffset:]
                f.seek(self._dirtyoffset)
                f.truncate()
                repo.ui.log('tagscache',
                            'writing %d bytes to cache/%s\n' % (
                            len(data), _fnodescachefile))
                f.write(data)
                self._dirtyoffset = None
            finally:
                f.close()
        except (IOError, OSError) as inst:
            # Cache writes are advisory; log the failure and move on.
            repo.ui.log('tagscache',
                        "couldn't write cache/%s: %s\n" % (
                        _fnodescachefile, util.forcebytestr(inst)))
        finally:
            lock.release()
General Comments 0
You need to be logged in to leave comments. Login now