rust: module policy with importrust...
Georges Racinet
r42651:810f66b4 default
@@ -1,3469 +1,3481 @@
# debugcommands.py - command processing for debug* commands
#
# Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import codecs
import collections
import difflib
import errno
import operator
import os
import random
import re
import socket
import ssl
import stat
import string
import subprocess
import sys
import time

from .i18n import _
from .node import (
    bin,
    hex,
    nullhex,
    nullid,
    nullrev,
    short,
)
from . import (
    bundle2,
    changegroup,
    cmdutil,
    color,
    context,
    copies,
    dagparser,
    encoding,
    error,
    exchange,
    extensions,
    filemerge,
    filesetlang,
    formatter,
    hg,
    httppeer,
    localrepo,
    lock as lockmod,
    logcmdutil,
    merge as mergemod,
    obsolete,
    obsutil,
    phases,
    policy,
    pvec,
    pycompat,
    registrar,
    repair,
    revlog,
    revset,
    revsetlang,
    scmutil,
    setdiscovery,
    simplemerge,
    sshpeer,
    sslutil,
    streamclone,
    templater,
    treediscovery,
    upgrade,
    url as urlmod,
    util,
    vfs as vfsmod,
    wireprotoframing,
    wireprotoserver,
    wireprotov2peer,
)
from .utils import (
    cborutil,
    compression,
    dateutil,
    procutil,
    stringutil,
)

from .revlogutils import (
    deltas as deltautil
)

release = lockmod.release

command = registrar.command()

@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        index, rev1, rev2 = args
        r = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = r.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))

@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)

@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1
        atbranch = 'default'
        nodeids = []
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

    if tags:
        repo.vfs.write("localtags", "".join(tags))

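# Editor's note -- an illustrative reading of the DAG mini-language documented
# above; this comment is not part of the original module. By those rules, a
# hypothetical run in a fresh, empty repository such as
#
#     hg debugbuilddag '+3 *2 /1'
#
# builds r0..r2 as a linear run ("+3"), then "*2" forks a node whose parent is
# the backref curr-2 (r0), and "/1" merges that fork with r2, giving a small
# branching-and-merging history to experiment with.
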
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))

def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()

def _debugphaseheads(ui, data, indent=0):
    """display version and markers contained in 'data'"""
    indent_string = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        for head in headsbyphase[phase]:
            ui.write(indent_string)
            ui.write('%s %s\n' % (hex(head), phases.phasenames[phase]))

def _quasirepr(thing):
    if isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return '{%s}' % (
            b', '.join(b'%s: %s' % (k, thing[k]) for k in sorted(thing)))
    return pycompat.bytestr(repr(thing))

def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)

@command('debugbundle',
    [('a', 'all', None, _('show all details')),
     ('', 'part-type', [], _('show only the named part type')),
     ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)

@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write((' %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % key)
            for v in values:
                ui.write((' %s\n') % v)

@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        error = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(error)

@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    else:
        return _debugdisplaycolor(ui)

def _debugdisplaycolor(ui):
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)

def _debugdisplaystyle(ui):
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # 50
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')

@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))

@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")

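# Editor's note (illustrative, not part of the original file): for example,
# `hg debugdag -t -b` emits the current repo's changelog DAG annotated with
# tags and branch names, while a hypothetical
# `hg debugdag .hg/store/00changelog.i 0 5` reads that revlog index directly
# and labels revisions 0 and 5 as r0 and r5, matching the docstring above.
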
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)

@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))

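# Editor's note (illustrative, not part of the original file): for example,
# `hg debugdate '2006-02-01 13:00:30 -0500'` prints the parsed value both as
# Mercurial's internal pair (a Unix timestamp and a timezone offset in
# seconds, as returned by dateutil.parsedate) and as a standard date string;
# passing -e additionally tries util.extendeddateformats.
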
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    start = r.start
    length = r.length
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        if uncomp != 0:
            chainratio = float(chainsize) / float(uncomp)
        else:
            chainratio = chainsize

        if chainsize != 0:
            extraratio = float(extradist) / float(chainsize)
        else:
            extraratio = extradist

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            if readsize:
                readdensity = float(chainsize) / float(readsize)
            else:
                readdensity = 1

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()

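# Editor's note (illustrative, not part of the original file): because the
# command accepts cmdutil.formatteropts, the columns above can be templated,
# e.g. a hypothetical
#
#     hg debugdeltachain -m -T '{rev} {chainid} {chainlen} {chainratio}\n'
#
# prints one line per manifest revision using the template keywords documented
# in the docstring.
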
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
    ('', 'dates', True, _('display the saved mtime')),
    ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    ('', 'rev', [], 'restrict discovery to this set of revs'),
    ('', 'seed', '12323', 'specify the random seed use for discovery'),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))



    if opts.get('old'):
        def doit(pushedrevs, remoteheads, remote=remote):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        def doit(pushedrevs, remoteheads, remote=remote):
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = doit(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-common-both'] = len(common & rheads & lheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("elapsed time: %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write((" total common heads: %(nb-common)9d\n") % data)
    ui.write((" also local heads: %(nb-common-local)9d\n") % data)
    ui.write((" also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write((" both: %(nb-common-both)9d\n") % data)
    ui.write((" local heads: %(nb-local)9d\n") % data)
    ui.write((" common: %(nb-common-local)9d\n") % data)
    ui.write((" missing: %(nb-local-missing)9d\n") % data)
    ui.write((" remote heads: %(nb-remote)9d\n") % data)
    ui.write((" common: %(nb-common-remote)9d\n") % data)
    ui.write((" unknown: %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets: %(nb-revs)9d\n") % data)
    ui.write((" common: %(nb-revs-common)9d\n") % data)
    ui.write((" missing: %(nb-revs-missing)9d\n") % data)

    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))

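# Editor's note (illustrative, not part of the original file): a hypothetical
# invocation such as
#
#     hg debugdiscovery --rev . https://example.com/repo
#
# runs discovery against that peer for the ancestors of the working-directory
# parent and prints the timing and heads/changeset statistics assembled in
# `data` above; --old selects the legacy treediscovery implementation instead.
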
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()

@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
        if isinternal or hgver in exttestedwith:
            fm.plain('\n')
        elif not exttestedwith:
            fm.plain(_(' (untested!)\n'))
        else:
            lasttestedversion = exttestedwith[-1]
            fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()

@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
    ('', 'all-files', False,
     _('test files from all revisions and working directory')),
    ('s', 'show-matcher', None,
     _('print internal representation of matcher')),
    ('p', 'show-stage', [],
     _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
941 def debugfileset(ui, repo, expr, **opts):
942 '''parse and apply a fileset specification'''
942 '''parse and apply a fileset specification'''
943 from . import fileset
943 from . import fileset
944 fileset.symbols # force import of fileset so we have predicates to optimize
944 fileset.symbols # force import of fileset so we have predicates to optimize
945 opts = pycompat.byteskwargs(opts)
945 opts = pycompat.byteskwargs(opts)
946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
946 ctx = scmutil.revsingle(repo, opts.get('rev'), None)
947
947
948 stages = [
948 stages = [
949 ('parsed', pycompat.identity),
949 ('parsed', pycompat.identity),
950 ('analyzed', filesetlang.analyze),
950 ('analyzed', filesetlang.analyze),
951 ('optimized', filesetlang.optimize),
951 ('optimized', filesetlang.optimize),
952 ]
952 ]
953 stagenames = set(n for n, f in stages)
953 stagenames = set(n for n, f in stages)
954
954
955 showalways = set()
955 showalways = set()
956 if ui.verbose and not opts['show_stage']:
956 if ui.verbose and not opts['show_stage']:
957 # show parsed tree by --verbose (deprecated)
957 # show parsed tree by --verbose (deprecated)
958 showalways.add('parsed')
958 showalways.add('parsed')
959 if opts['show_stage'] == ['all']:
959 if opts['show_stage'] == ['all']:
960 showalways.update(stagenames)
960 showalways.update(stagenames)
961 else:
961 else:
962 for n in opts['show_stage']:
962 for n in opts['show_stage']:
963 if n not in stagenames:
963 if n not in stagenames:
964 raise error.Abort(_('invalid stage name: %s') % n)
964 raise error.Abort(_('invalid stage name: %s') % n)
965 showalways.update(opts['show_stage'])
965 showalways.update(opts['show_stage'])
966
966
967 tree = filesetlang.parse(expr)
967 tree = filesetlang.parse(expr)
968 for n, f in stages:
968 for n, f in stages:
969 tree = f(tree)
969 tree = f(tree)
970 if n in showalways:
970 if n in showalways:
971 if opts['show_stage'] or n != 'parsed':
971 if opts['show_stage'] or n != 'parsed':
972 ui.write(("* %s:\n") % n)
972 ui.write(("* %s:\n") % n)
973 ui.write(filesetlang.prettyformat(tree), "\n")
973 ui.write(filesetlang.prettyformat(tree), "\n")
974
974
975 files = set()
975 files = set()
976 if opts['all_files']:
976 if opts['all_files']:
977 for r in repo:
977 for r in repo:
978 c = repo[r]
978 c = repo[r]
979 files.update(c.files())
979 files.update(c.files())
980 files.update(c.substate)
980 files.update(c.substate)
981 if opts['all_files'] or ctx.rev() is None:
981 if opts['all_files'] or ctx.rev() is None:
982 wctx = repo[None]
982 wctx = repo[None]
983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
983 files.update(repo.dirstate.walk(scmutil.matchall(repo),
984 subrepos=list(wctx.substate),
984 subrepos=list(wctx.substate),
985 unknown=True, ignored=True))
985 unknown=True, ignored=True))
986 files.update(wctx.substate)
986 files.update(wctx.substate)
987 else:
987 else:
988 files.update(ctx.files())
988 files.update(ctx.files())
989 files.update(ctx.substate)
989 files.update(ctx.substate)
990
990
991 m = ctx.matchfileset(expr)
991 m = ctx.matchfileset(expr)
992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
992 if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
993 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
994 for f in sorted(files):
994 for f in sorted(files):
995 if not m(f):
995 if not m(f):
996 continue
996 continue
997 ui.write("%s\n" % f)
997 ui.write("%s\n" % f)
998
998
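# A hedged usage sketch for debugfileset above; the fileset expressions are
# illustrative assumptions. --show-stage all prints every stage listed in
# `stages` (parsed, analyzed, optimized):
#   hg debugfileset -r . --show-stage all 'added()'
#   hg debugfileset --all-files 'size(">100kB")'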
999 @command('debugformat',
999 @command('debugformat',
1000 [] + cmdutil.formatteropts)
1000 [] + cmdutil.formatteropts)
1001 def debugformat(ui, repo, **opts):
1001 def debugformat(ui, repo, **opts):
1002 """display format information about the current repository
1002 """display format information about the current repository
1003
1003
1004 Use --verbose to get extra information about current config value and
1004 Use --verbose to get extra information about current config value and
1005 Mercurial default."""
1005 Mercurial default."""
1006 opts = pycompat.byteskwargs(opts)
1006 opts = pycompat.byteskwargs(opts)
1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1007 maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1008 maxvariantlength = max(len('format-variant'), maxvariantlength)
1009
1009
1010 def makeformatname(name):
1010 def makeformatname(name):
1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1011 return '%s:' + (' ' * (maxvariantlength - len(name)))
1012
1012
1013 fm = ui.formatter('debugformat', opts)
1013 fm = ui.formatter('debugformat', opts)
1014 if fm.isplain():
1014 if fm.isplain():
1015 def formatvalue(value):
1015 def formatvalue(value):
1016 if util.safehasattr(value, 'startswith'):
1016 if util.safehasattr(value, 'startswith'):
1017 return value
1017 return value
1018 if value:
1018 if value:
1019 return 'yes'
1019 return 'yes'
1020 else:
1020 else:
1021 return 'no'
1021 return 'no'
1022 else:
1022 else:
1023 formatvalue = pycompat.identity
1023 formatvalue = pycompat.identity
1024
1024
1025 fm.plain('format-variant')
1025 fm.plain('format-variant')
1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1026 fm.plain(' ' * (maxvariantlength - len('format-variant')))
1027 fm.plain(' repo')
1027 fm.plain(' repo')
1028 if ui.verbose:
1028 if ui.verbose:
1029 fm.plain(' config default')
1029 fm.plain(' config default')
1030 fm.plain('\n')
1030 fm.plain('\n')
1031 for fv in upgrade.allformatvariant:
1031 for fv in upgrade.allformatvariant:
1032 fm.startitem()
1032 fm.startitem()
1033 repovalue = fv.fromrepo(repo)
1033 repovalue = fv.fromrepo(repo)
1034 configvalue = fv.fromconfig(repo)
1034 configvalue = fv.fromconfig(repo)
1035
1035
1036 if repovalue != configvalue:
1036 if repovalue != configvalue:
1037 namelabel = 'formatvariant.name.mismatchconfig'
1037 namelabel = 'formatvariant.name.mismatchconfig'
1038 repolabel = 'formatvariant.repo.mismatchconfig'
1038 repolabel = 'formatvariant.repo.mismatchconfig'
1039 elif repovalue != fv.default:
1039 elif repovalue != fv.default:
1040 namelabel = 'formatvariant.name.mismatchdefault'
1040 namelabel = 'formatvariant.name.mismatchdefault'
1041 repolabel = 'formatvariant.repo.mismatchdefault'
1041 repolabel = 'formatvariant.repo.mismatchdefault'
1042 else:
1042 else:
1043 namelabel = 'formatvariant.name.uptodate'
1043 namelabel = 'formatvariant.name.uptodate'
1044 repolabel = 'formatvariant.repo.uptodate'
1044 repolabel = 'formatvariant.repo.uptodate'
1045
1045
1046 fm.write('name', makeformatname(fv.name), fv.name,
1046 fm.write('name', makeformatname(fv.name), fv.name,
1047 label=namelabel)
1047 label=namelabel)
1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1048 fm.write('repo', ' %3s', formatvalue(repovalue),
1049 label=repolabel)
1049 label=repolabel)
1050 if fv.default != configvalue:
1050 if fv.default != configvalue:
1051 configlabel = 'formatvariant.config.special'
1051 configlabel = 'formatvariant.config.special'
1052 else:
1052 else:
1053 configlabel = 'formatvariant.config.default'
1053 configlabel = 'formatvariant.config.default'
1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1054 fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
1055 label=configlabel)
1055 label=configlabel)
1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1056 fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
1057 label='formatvariant.default')
1057 label='formatvariant.default')
1058 fm.plain('\n')
1058 fm.plain('\n')
1059 fm.end()
1059 fm.end()
1060
1060
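# A minimal usage sketch for debugformat above, following its docstring;
# -T json is just one of the generic formatter outputs from formatteropts:
#   hg debugformat --verbose
#   hg debugformat -T json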
1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1061 @command('debugfsinfo', [], _('[PATH]'), norepo=True)
1062 def debugfsinfo(ui, path="."):
1062 def debugfsinfo(ui, path="."):
1063 """show information detected about current filesystem"""
1063 """show information detected about current filesystem"""
1064 ui.write(('path: %s\n') % path)
1064 ui.write(('path: %s\n') % path)
1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1065 ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1066 ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1067 ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1068 ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1069 ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
1070 casesensitive = '(unknown)'
1070 casesensitive = '(unknown)'
1071 try:
1071 try:
1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1072 with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1073 casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
1074 except OSError:
1074 except OSError:
1075 pass
1075 pass
1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1076 ui.write(('case-sensitive: %s\n') % casesensitive)
1077
1077
1078 @command('debuggetbundle',
1078 @command('debuggetbundle',
1079 [('H', 'head', [], _('id of head node'), _('ID')),
1079 [('H', 'head', [], _('id of head node'), _('ID')),
1080 ('C', 'common', [], _('id of common node'), _('ID')),
1080 ('C', 'common', [], _('id of common node'), _('ID')),
1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1081 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
1082 _('REPO FILE [-H|-C ID]...'),
1082 _('REPO FILE [-H|-C ID]...'),
1083 norepo=True)
1083 norepo=True)
1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1084 def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
1085 """retrieves a bundle from a repo
1085 """retrieves a bundle from a repo
1086
1086
1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1087 Every ID must be a full-length hex node id string. Saves the bundle to the
1088 given file.
1088 given file.
1089 """
1089 """
1090 opts = pycompat.byteskwargs(opts)
1090 opts = pycompat.byteskwargs(opts)
1091 repo = hg.peer(ui, opts, repopath)
1091 repo = hg.peer(ui, opts, repopath)
1092 if not repo.capable('getbundle'):
1092 if not repo.capable('getbundle'):
1093 raise error.Abort("getbundle() not supported by target repository")
1093 raise error.Abort("getbundle() not supported by target repository")
1094 args = {}
1094 args = {}
1095 if common:
1095 if common:
1096 args[r'common'] = [bin(s) for s in common]
1096 args[r'common'] = [bin(s) for s in common]
1097 if head:
1097 if head:
1098 args[r'heads'] = [bin(s) for s in head]
1098 args[r'heads'] = [bin(s) for s in head]
1099 # TODO: get desired bundlecaps from command line.
1099 # TODO: get desired bundlecaps from command line.
1100 args[r'bundlecaps'] = None
1100 args[r'bundlecaps'] = None
1101 bundle = repo.getbundle('debug', **args)
1101 bundle = repo.getbundle('debug', **args)
1102
1102
1103 bundletype = opts.get('type', 'bzip2').lower()
1103 bundletype = opts.get('type', 'bzip2').lower()
1104 btypes = {'none': 'HG10UN',
1104 btypes = {'none': 'HG10UN',
1105 'bzip2': 'HG10BZ',
1105 'bzip2': 'HG10BZ',
1106 'gzip': 'HG10GZ',
1106 'gzip': 'HG10GZ',
1107 'bundle2': 'HG20'}
1107 'bundle2': 'HG20'}
1108 bundletype = btypes.get(bundletype)
1108 bundletype = btypes.get(bundletype)
1109 if bundletype not in bundle2.bundletypes:
1109 if bundletype not in bundle2.bundletypes:
1110 raise error.Abort(_('unknown bundle type specified with --type'))
1110 raise error.Abort(_('unknown bundle type specified with --type'))
1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1111 bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1112
1112
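# A hedged usage sketch for debuggetbundle above; REPO, FILE and the head ID
# are placeholders (IDs must be full 40-digit hex nodes, per the docstring),
# and the -t values come from the btypes table:
#   hg debuggetbundle path/to/other-repo out.hg -H <40-hex-node> -t bundle2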
1113 @command('debugignore', [], '[FILE]')
1113 @command('debugignore', [], '[FILE]')
1114 def debugignore(ui, repo, *files, **opts):
1114 def debugignore(ui, repo, *files, **opts):
1115 """display the combined ignore pattern and information about ignored files
1115 """display the combined ignore pattern and information about ignored files
1116
1116
1117 With no argument display the combined ignore pattern.
1117 With no argument display the combined ignore pattern.
1118
1118
1119 Given space-separated file names, show whether each given file is ignored
1119 Given space-separated file names, show whether each given file is ignored
1120 and, if so, the ignore rule (file and line number) that matched it.
1120 and, if so, the ignore rule (file and line number) that matched it.
1121 """
1121 """
1122 ignore = repo.dirstate._ignore
1122 ignore = repo.dirstate._ignore
1123 if not files:
1123 if not files:
1124 # Show all the patterns
1124 # Show all the patterns
1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1125 ui.write("%s\n" % pycompat.byterepr(ignore))
1126 else:
1126 else:
1127 m = scmutil.match(repo[None], pats=files)
1127 m = scmutil.match(repo[None], pats=files)
1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1128 uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
1129 for f in m.files():
1129 for f in m.files():
1130 nf = util.normpath(f)
1130 nf = util.normpath(f)
1131 ignored = None
1131 ignored = None
1132 ignoredata = None
1132 ignoredata = None
1133 if nf != '.':
1133 if nf != '.':
1134 if ignore(nf):
1134 if ignore(nf):
1135 ignored = nf
1135 ignored = nf
1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1136 ignoredata = repo.dirstate._ignorefileandline(nf)
1137 else:
1137 else:
1138 for p in util.finddirs(nf):
1138 for p in util.finddirs(nf):
1139 if ignore(p):
1139 if ignore(p):
1140 ignored = p
1140 ignored = p
1141 ignoredata = repo.dirstate._ignorefileandline(p)
1141 ignoredata = repo.dirstate._ignorefileandline(p)
1142 break
1142 break
1143 if ignored:
1143 if ignored:
1144 if ignored == nf:
1144 if ignored == nf:
1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1145 ui.write(_("%s is ignored\n") % uipathfn(f))
1146 else:
1146 else:
1147 ui.write(_("%s is ignored because of "
1147 ui.write(_("%s is ignored because of "
1148 "containing directory %s\n")
1148 "containing directory %s\n")
1149 % (uipathfn(f), ignored))
1149 % (uipathfn(f), ignored))
1150 ignorefile, lineno, line = ignoredata
1150 ignorefile, lineno, line = ignoredata
1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1151 ui.write(_("(ignore rule in %s, line %d: '%s')\n")
1152 % (ignorefile, lineno, line))
1152 % (ignorefile, lineno, line))
1153 else:
1153 else:
1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1154 ui.write(_("%s is not ignored\n") % uipathfn(f))
1155
1155
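# A hedged usage sketch for debugignore above; the file name is an
# illustrative assumption:
#   hg debugignore                # print the combined ignore pattern
#   hg debugignore build/foo.o    # report whether (and why) the file is ignored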
1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1156 @command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
1157 _('-c|-m|FILE'))
1157 _('-c|-m|FILE'))
1158 def debugindex(ui, repo, file_=None, **opts):
1158 def debugindex(ui, repo, file_=None, **opts):
1159 """dump index data for a storage primitive"""
1159 """dump index data for a storage primitive"""
1160 opts = pycompat.byteskwargs(opts)
1160 opts = pycompat.byteskwargs(opts)
1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1161 store = cmdutil.openstorage(repo, 'debugindex', file_, opts)
1162
1162
1163 if ui.debugflag:
1163 if ui.debugflag:
1164 shortfn = hex
1164 shortfn = hex
1165 else:
1165 else:
1166 shortfn = short
1166 shortfn = short
1167
1167
1168 idlen = 12
1168 idlen = 12
1169 for i in store:
1169 for i in store:
1170 idlen = len(shortfn(store.node(i)))
1170 idlen = len(shortfn(store.node(i)))
1171 break
1171 break
1172
1172
1173 fm = ui.formatter('debugindex', opts)
1173 fm = ui.formatter('debugindex', opts)
1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1174 fm.plain(b' rev linkrev %s %s p2\n' % (
1175 b'nodeid'.ljust(idlen),
1175 b'nodeid'.ljust(idlen),
1176 b'p1'.ljust(idlen)))
1176 b'p1'.ljust(idlen)))
1177
1177
1178 for rev in store:
1178 for rev in store:
1179 node = store.node(rev)
1179 node = store.node(rev)
1180 parents = store.parents(node)
1180 parents = store.parents(node)
1181
1181
1182 fm.startitem()
1182 fm.startitem()
1183 fm.write(b'rev', b'%6d ', rev)
1183 fm.write(b'rev', b'%6d ', rev)
1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1184 fm.write(b'linkrev', '%7d ', store.linkrev(rev))
1185 fm.write(b'node', '%s ', shortfn(node))
1185 fm.write(b'node', '%s ', shortfn(node))
1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1186 fm.write(b'p1', '%s ', shortfn(parents[0]))
1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1187 fm.write(b'p2', '%s', shortfn(parents[1]))
1188 fm.plain(b'\n')
1188 fm.plain(b'\n')
1189
1189
1190 fm.end()
1190 fm.end()
1191
1191
1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1192 @command('debugindexdot', cmdutil.debugrevlogopts,
1193 _('-c|-m|FILE'), optionalrepo=True)
1193 _('-c|-m|FILE'), optionalrepo=True)
1194 def debugindexdot(ui, repo, file_=None, **opts):
1194 def debugindexdot(ui, repo, file_=None, **opts):
1195 """dump an index DAG as a graphviz dot file"""
1195 """dump an index DAG as a graphviz dot file"""
1196 opts = pycompat.byteskwargs(opts)
1196 opts = pycompat.byteskwargs(opts)
1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1197 r = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
1198 ui.write(("digraph G {\n"))
1198 ui.write(("digraph G {\n"))
1199 for i in r:
1199 for i in r:
1200 node = r.node(i)
1200 node = r.node(i)
1201 pp = r.parents(node)
1201 pp = r.parents(node)
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1202 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1203 if pp[1] != nullid:
1203 if pp[1] != nullid:
1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1204 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1205 ui.write("}\n")
1205 ui.write("}\n")
1206
1206
1207 @command('debugindexstats', [])
1207 @command('debugindexstats', [])
1208 def debugindexstats(ui, repo):
1208 def debugindexstats(ui, repo):
1209 """show stats related to the changelog index"""
1209 """show stats related to the changelog index"""
1210 repo.changelog.shortest(nullid, 1)
1210 repo.changelog.shortest(nullid, 1)
1211 index = repo.changelog.index
1211 index = repo.changelog.index
1212 if not util.safehasattr(index, 'stats'):
1212 if not util.safehasattr(index, 'stats'):
1213 raise error.Abort(_('debugindexstats only works with native code'))
1213 raise error.Abort(_('debugindexstats only works with native code'))
1214 for k, v in sorted(index.stats().items()):
1214 for k, v in sorted(index.stats().items()):
1215 ui.write('%s: %d\n' % (k, v))
1215 ui.write('%s: %d\n' % (k, v))
1216
1216
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1218 def debuginstall(ui, **opts):
1218 def debuginstall(ui, **opts):
1219 '''test Mercurial installation
1219 '''test Mercurial installation
1220
1220
1221 Returns 0 on success.
1221 Returns 0 on success.
1222 '''
1222 '''
1223 opts = pycompat.byteskwargs(opts)
1223 opts = pycompat.byteskwargs(opts)
1224
1224
1225 problems = 0
1225 problems = 0
1226
1226
1227 fm = ui.formatter('debuginstall', opts)
1227 fm = ui.formatter('debuginstall', opts)
1228 fm.startitem()
1228 fm.startitem()
1229
1229
1230 # encoding
1230 # encoding
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1232 err = None
1232 err = None
1233 try:
1233 try:
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1235 except LookupError as inst:
1235 except LookupError as inst:
1236 err = stringutil.forcebytestr(inst)
1236 err = stringutil.forcebytestr(inst)
1237 problems += 1
1237 problems += 1
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1239 " (check that your locale is properly set)\n"), err)
1239 " (check that your locale is properly set)\n"), err)
1240
1240
1241 # Python
1241 # Python
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1243 pycompat.sysexecutable)
1243 pycompat.sysexecutable)
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1245 ("%d.%d.%d" % sys.version_info[:3]))
1245 ("%d.%d.%d" % sys.version_info[:3]))
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1248
1248
1249 security = set(sslutil.supportedprotocols)
1249 security = set(sslutil.supportedprotocols)
1250 if sslutil.hassni:
1250 if sslutil.hassni:
1251 security.add('sni')
1251 security.add('sni')
1252
1252
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1254 fm.formatlist(sorted(security), name='protocol',
1254 fm.formatlist(sorted(security), name='protocol',
1255 fmt='%s', sep=','))
1255 fmt='%s', sep=','))
1256
1256
1257 # These are warnings, not errors. So don't increment problem count. This
1257 # These are warnings, not errors. So don't increment problem count. This
1258 # may change in the future.
1258 # may change in the future.
1259 if 'tls1.2' not in security:
1259 if 'tls1.2' not in security:
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1261 'network connections lack modern security\n'))
1261 'network connections lack modern security\n'))
1262 if 'sni' not in security:
1262 if 'sni' not in security:
1263 fm.plain(_(' SNI not supported by Python install; may have '
1263 fm.plain(_(' SNI not supported by Python install; may have '
1264 'connectivity issues with some servers\n'))
1264 'connectivity issues with some servers\n'))
1265
1265
1266 # TODO print CA cert info
1266 # TODO print CA cert info
1267
1267
1268 # hg version
1268 # hg version
1269 hgver = util.version()
1269 hgver = util.version()
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1271 hgver.split('+')[0])
1271 hgver.split('+')[0])
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1273 '+'.join(hgver.split('+')[1:]))
1273 '+'.join(hgver.split('+')[1:]))
1274
1274
1275 # compiled modules
1275 # compiled modules
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1277 policy.policy)
1277 policy.policy)
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1279 os.path.dirname(pycompat.fsencode(__file__)))
1279 os.path.dirname(pycompat.fsencode(__file__)))
1280
1280
1281 if policy.policy in ('c', 'allow'):
1282 err = None
1283 try:
1284 from .cext import (
1285 base85,
1286 bdiff,
1287 mpatch,
1288 osutil,
1289 )
1290 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1282 rustext = rustandc # for now, that's the only case
1283 cext = policy.policy in ('c', 'allow') or rustandc
1284 nopure = cext or rustext
1285 if nopure:
1286 err = None
1287 try:
1288 if cext:
1289 from .cext import (
1290 base85,
1291 bdiff,
1292 mpatch,
1293 osutil,
1294 )
1295 # quiet pyflakes
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1297 if rustext:
1298 from .rustext import (
1299 ancestor,
1300 dirstate,
1301 )
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1291 except Exception as inst:
1303 except Exception as inst:
1292 err = stringutil.forcebytestr(inst)
1304 err = stringutil.forcebytestr(inst)
1293 problems += 1
1305 problems += 1
1294 fm.condwrite(err, 'extensionserror', " %s\n", err)
1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1295
1307
1296 compengines = util.compengines._engines.values()
1308 compengines = util.compengines._engines.values()
1297 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1298 fm.formatlist(sorted(e.name() for e in compengines),
1310 fm.formatlist(sorted(e.name() for e in compengines),
1299 name='compengine', fmt='%s', sep=', '))
1311 name='compengine', fmt='%s', sep=', '))
1300 fm.write('compenginesavail', _('checking available compression engines '
1312 fm.write('compenginesavail', _('checking available compression engines '
1301 '(%s)\n'),
1313 '(%s)\n'),
1302 fm.formatlist(sorted(e.name() for e in compengines
1314 fm.formatlist(sorted(e.name() for e in compengines
1303 if e.available()),
1315 if e.available()),
1304 name='compengine', fmt='%s', sep=', '))
1316 name='compengine', fmt='%s', sep=', '))
1305 wirecompengines = compression.compengines.supportedwireengines(
1317 wirecompengines = compression.compengines.supportedwireengines(
1306 compression.SERVERROLE)
1318 compression.SERVERROLE)
1307 fm.write('compenginesserver', _('checking available compression engines '
1319 fm.write('compenginesserver', _('checking available compression engines '
1308 'for wire protocol (%s)\n'),
1320 'for wire protocol (%s)\n'),
1309 fm.formatlist([e.name() for e in wirecompengines
1321 fm.formatlist([e.name() for e in wirecompengines
1310 if e.wireprotosupport()],
1322 if e.wireprotosupport()],
1311 name='compengine', fmt='%s', sep=', '))
1323 name='compengine', fmt='%s', sep=', '))
1312 re2 = 'missing'
1324 re2 = 'missing'
1313 if util._re2:
1325 if util._re2:
1314 re2 = 'available'
1326 re2 = 'available'
1315 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1316 fm.data(re2=bool(util._re2))
1328 fm.data(re2=bool(util._re2))
1317
1329
1318 # templates
1330 # templates
1319 p = templater.templatepaths()
1331 p = templater.templatepaths()
1320 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1321 fm.condwrite(not p, '', _(" no template directories found\n"))
1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1322 if p:
1334 if p:
1323 m = templater.templatepath("map-cmdline.default")
1335 m = templater.templatepath("map-cmdline.default")
1324 if m:
1336 if m:
1325 # template found, check if it is working
1337 # template found, check if it is working
1326 err = None
1338 err = None
1327 try:
1339 try:
1328 templater.templater.frommapfile(m)
1340 templater.templater.frommapfile(m)
1329 except Exception as inst:
1341 except Exception as inst:
1330 err = stringutil.forcebytestr(inst)
1342 err = stringutil.forcebytestr(inst)
1331 p = None
1343 p = None
1332 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1333 else:
1345 else:
1334 p = None
1346 p = None
1335 fm.condwrite(p, 'defaulttemplate',
1347 fm.condwrite(p, 'defaulttemplate',
1336 _("checking default template (%s)\n"), m)
1348 _("checking default template (%s)\n"), m)
1337 fm.condwrite(not m, 'defaulttemplatenotfound',
1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1338 _(" template '%s' not found\n"), "default")
1350 _(" template '%s' not found\n"), "default")
1339 if not p:
1351 if not p:
1340 problems += 1
1352 problems += 1
1341 fm.condwrite(not p, '',
1353 fm.condwrite(not p, '',
1342 _(" (templates seem to have been installed incorrectly)\n"))
1354 _(" (templates seem to have been installed incorrectly)\n"))
1343
1355
1344 # editor
1356 # editor
1345 editor = ui.geteditor()
1357 editor = ui.geteditor()
1346 editor = util.expandpath(editor)
1358 editor = util.expandpath(editor)
1347 editorbin = procutil.shellsplit(editor)[0]
1359 editorbin = procutil.shellsplit(editor)[0]
1348 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1349 cmdpath = procutil.findexe(editorbin)
1361 cmdpath = procutil.findexe(editorbin)
1350 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1351 _(" No commit editor set and can't find %s in PATH\n"
1363 _(" No commit editor set and can't find %s in PATH\n"
1352 " (specify a commit editor in your configuration"
1364 " (specify a commit editor in your configuration"
1353 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1354 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1355 _(" Can't find editor '%s' in PATH\n"
1367 _(" Can't find editor '%s' in PATH\n"
1356 " (specify a commit editor in your configuration"
1368 " (specify a commit editor in your configuration"
1357 " file)\n"), not cmdpath and editorbin)
1369 " file)\n"), not cmdpath and editorbin)
1358 if not cmdpath and editor != 'vi':
1370 if not cmdpath and editor != 'vi':
1359 problems += 1
1371 problems += 1
1360
1372
1361 # check username
1373 # check username
1362 username = None
1374 username = None
1363 err = None
1375 err = None
1364 try:
1376 try:
1365 username = ui.username()
1377 username = ui.username()
1366 except error.Abort as e:
1378 except error.Abort as e:
1367 err = stringutil.forcebytestr(e)
1379 err = stringutil.forcebytestr(e)
1368 problems += 1
1380 problems += 1
1369
1381
1370 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1371 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1372 " (specify a username in your configuration file)\n"), err)
1384 " (specify a username in your configuration file)\n"), err)
1373
1385
1374 fm.condwrite(not problems, '',
1386 fm.condwrite(not problems, '',
1375 _("no problems detected\n"))
1387 _("no problems detected\n"))
1376 if not problems:
1388 if not problems:
1377 fm.data(problems=problems)
1389 fm.data(problems=problems)
1378 fm.condwrite(problems, 'problems',
1390 fm.condwrite(problems, 'problems',
1379 _("%d problems detected,"
1391 _("%d problems detected,"
1380 " please check your install!\n"), problems)
1392 " please check your install!\n"), problems)
1381 fm.end()
1393 fm.end()
1382
1394
1383 return problems
1395 return problems
1384
1396
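# A hedged usage sketch for debuginstall above. With this revision, the
# "checking installed modules" step also imports the Rust extensions
# (.rustext) when the module policy is rust+c or rust+c-allow:
#   hg debuginstall
#   HGMODULEPOLICY=rust+c hg debuginstall   # assumes a build with the Rust extensions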
1385 @command('debugknown', [], _('REPO ID...'), norepo=True)
1397 @command('debugknown', [], _('REPO ID...'), norepo=True)
1386 def debugknown(ui, repopath, *ids, **opts):
1398 def debugknown(ui, repopath, *ids, **opts):
1387 """test whether node ids are known to a repo
1399 """test whether node ids are known to a repo
1388
1400
1389 Every ID must be a full-length hex node id string. Returns a list of 0s
1401 Every ID must be a full-length hex node id string. Returns a list of 0s
1390 and 1s indicating unknown/known.
1402 and 1s indicating unknown/known.
1391 """
1403 """
1392 opts = pycompat.byteskwargs(opts)
1404 opts = pycompat.byteskwargs(opts)
1393 repo = hg.peer(ui, opts, repopath)
1405 repo = hg.peer(ui, opts, repopath)
1394 if not repo.capable('known'):
1406 if not repo.capable('known'):
1395 raise error.Abort("known() not supported by target repository")
1407 raise error.Abort("known() not supported by target repository")
1396 flags = repo.known([bin(s) for s in ids])
1408 flags = repo.known([bin(s) for s in ids])
1397 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1409 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1398
1410
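# A hedged usage sketch for debugknown above; the peer path is an illustrative
# assumption, and each ID must be a full hex node (here taken from the local tip):
#   hg debugknown ssh://example.com/repo "$(hg log -r tip -T '{node}')"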
1399 @command('debuglabelcomplete', [], _('LABEL...'))
1411 @command('debuglabelcomplete', [], _('LABEL...'))
1400 def debuglabelcomplete(ui, repo, *args):
1412 def debuglabelcomplete(ui, repo, *args):
1401 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1413 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
1402 debugnamecomplete(ui, repo, *args)
1414 debugnamecomplete(ui, repo, *args)
1403
1415
1404 @command('debuglocks',
1416 @command('debuglocks',
1405 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1417 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
1406 ('W', 'force-wlock', None,
1418 ('W', 'force-wlock', None,
1407 _('free the working state lock (DANGEROUS)')),
1419 _('free the working state lock (DANGEROUS)')),
1408 ('s', 'set-lock', None, _('set the store lock until stopped')),
1420 ('s', 'set-lock', None, _('set the store lock until stopped')),
1409 ('S', 'set-wlock', None,
1421 ('S', 'set-wlock', None,
1410 _('set the working state lock until stopped'))],
1422 _('set the working state lock until stopped'))],
1411 _('[OPTION]...'))
1423 _('[OPTION]...'))
1412 def debuglocks(ui, repo, **opts):
1424 def debuglocks(ui, repo, **opts):
1413 """show or modify state of locks
1425 """show or modify state of locks
1414
1426
1415 By default, this command will show which locks are held. This
1427 By default, this command will show which locks are held. This
1416 includes the user and process holding the lock, the amount of time
1428 includes the user and process holding the lock, the amount of time
1417 the lock has been held, and the machine name where the process is
1429 the lock has been held, and the machine name where the process is
1418 running if it's not local.
1430 running if it's not local.
1419
1431
1420 Locks protect the integrity of Mercurial's data, so should be
1432 Locks protect the integrity of Mercurial's data, so should be
1421 treated with care. System crashes or other interruptions may cause
1433 treated with care. System crashes or other interruptions may cause
1422 locks to not be properly released, though Mercurial will usually
1434 locks to not be properly released, though Mercurial will usually
1423 detect and remove such stale locks automatically.
1435 detect and remove such stale locks automatically.
1424
1436
1425 However, detecting stale locks may not always be possible (for
1437 However, detecting stale locks may not always be possible (for
1426 instance, on a shared filesystem). Removing locks may also be
1438 instance, on a shared filesystem). Removing locks may also be
1427 blocked by filesystem permissions.
1439 blocked by filesystem permissions.
1428
1440
1429 Setting a lock will prevent other commands from changing the data.
1441 Setting a lock will prevent other commands from changing the data.
1430 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1442 The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
1431 The set locks are removed when the command exits.
1443 The set locks are removed when the command exits.
1432
1444
1433 Returns 0 if no locks are held.
1445 Returns 0 if no locks are held.
1434
1446
1435 """
1447 """
1436
1448
1437 if opts.get(r'force_lock'):
1449 if opts.get(r'force_lock'):
1438 repo.svfs.unlink('lock')
1450 repo.svfs.unlink('lock')
1439 if opts.get(r'force_wlock'):
1451 if opts.get(r'force_wlock'):
1440 repo.vfs.unlink('wlock')
1452 repo.vfs.unlink('wlock')
1441 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1453 if opts.get(r'force_lock') or opts.get(r'force_wlock'):
1442 return 0
1454 return 0
1443
1455
1444 locks = []
1456 locks = []
1445 try:
1457 try:
1446 if opts.get(r'set_wlock'):
1458 if opts.get(r'set_wlock'):
1447 try:
1459 try:
1448 locks.append(repo.wlock(False))
1460 locks.append(repo.wlock(False))
1449 except error.LockHeld:
1461 except error.LockHeld:
1450 raise error.Abort(_('wlock is already held'))
1462 raise error.Abort(_('wlock is already held'))
1451 if opts.get(r'set_lock'):
1463 if opts.get(r'set_lock'):
1452 try:
1464 try:
1453 locks.append(repo.lock(False))
1465 locks.append(repo.lock(False))
1454 except error.LockHeld:
1466 except error.LockHeld:
1455 raise error.Abort(_('lock is already held'))
1467 raise error.Abort(_('lock is already held'))
1456 if len(locks):
1468 if len(locks):
1457 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1469 ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
1458 return 0
1470 return 0
1459 finally:
1471 finally:
1460 release(*locks)
1472 release(*locks)
1461
1473
1462 now = time.time()
1474 now = time.time()
1463 held = 0
1475 held = 0
1464
1476
1465 def report(vfs, name, method):
1477 def report(vfs, name, method):
1466 # this causes stale locks to get reaped for more accurate reporting
1478 # this causes stale locks to get reaped for more accurate reporting
1467 try:
1479 try:
1468 l = method(False)
1480 l = method(False)
1469 except error.LockHeld:
1481 except error.LockHeld:
1470 l = None
1482 l = None
1471
1483
1472 if l:
1484 if l:
1473 l.release()
1485 l.release()
1474 else:
1486 else:
1475 try:
1487 try:
1476 st = vfs.lstat(name)
1488 st = vfs.lstat(name)
1477 age = now - st[stat.ST_MTIME]
1489 age = now - st[stat.ST_MTIME]
1478 user = util.username(st.st_uid)
1490 user = util.username(st.st_uid)
1479 locker = vfs.readlock(name)
1491 locker = vfs.readlock(name)
1480 if ":" in locker:
1492 if ":" in locker:
1481 host, pid = locker.split(':')
1493 host, pid = locker.split(':')
1482 if host == socket.gethostname():
1494 if host == socket.gethostname():
1483 locker = 'user %s, process %s' % (user or b'None', pid)
1495 locker = 'user %s, process %s' % (user or b'None', pid)
1484 else:
1496 else:
1485 locker = ('user %s, process %s, host %s'
1497 locker = ('user %s, process %s, host %s'
1486 % (user or b'None', pid, host))
1498 % (user or b'None', pid, host))
1487 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1499 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
1488 return 1
1500 return 1
1489 except OSError as e:
1501 except OSError as e:
1490 if e.errno != errno.ENOENT:
1502 if e.errno != errno.ENOENT:
1491 raise
1503 raise
1492
1504
1493 ui.write(("%-6s free\n") % (name + ":"))
1505 ui.write(("%-6s free\n") % (name + ":"))
1494 return 0
1506 return 0
1495
1507
1496 held += report(repo.svfs, "lock", repo.lock)
1508 held += report(repo.svfs, "lock", repo.lock)
1497 held += report(repo.vfs, "wlock", repo.wlock)
1509 held += report(repo.vfs, "wlock", repo.wlock)
1498
1510
1499 return held
1511 return held
1500
1512
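# A hedged usage sketch for debuglocks above, using options from the table in
# the decorator (the force flags are marked DANGEROUS there):
#   hg debuglocks              # report which locks are held and by whom
#   hg debuglocks --set-wlock  # hold the working-state lock until interrupted
#   hg debuglocks -L           # free a stale store lock (DANGEROUS)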
1501 @command('debugmanifestfulltextcache', [
1513 @command('debugmanifestfulltextcache', [
1502 ('', 'clear', False, _('clear the cache')),
1514 ('', 'clear', False, _('clear the cache')),
1503 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1515 ('a', 'add', [], _('add the given manifest nodes to the cache'),
1504 _('NODE'))
1516 _('NODE'))
1505 ], '')
1517 ], '')
1506 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1518 def debugmanifestfulltextcache(ui, repo, add=(), **opts):
1507 """show, clear or amend the contents of the manifest fulltext cache"""
1519 """show, clear or amend the contents of the manifest fulltext cache"""
1508
1520
1509 def getcache():
1521 def getcache():
1510 r = repo.manifestlog.getstorage(b'')
1522 r = repo.manifestlog.getstorage(b'')
1511 try:
1523 try:
1512 return r._fulltextcache
1524 return r._fulltextcache
1513 except AttributeError:
1525 except AttributeError:
1514 msg = _("Current revlog implementation doesn't appear to have a "
1526 msg = _("Current revlog implementation doesn't appear to have a "
1515 "manifest fulltext cache\n")
1527 "manifest fulltext cache\n")
1516 raise error.Abort(msg)
1528 raise error.Abort(msg)
1517
1529
1518 if opts.get(r'clear'):
1530 if opts.get(r'clear'):
1519 with repo.wlock():
1531 with repo.wlock():
1520 cache = getcache()
1532 cache = getcache()
1521 cache.clear(clear_persisted_data=True)
1533 cache.clear(clear_persisted_data=True)
1522 return
1534 return
1523
1535
1524 if add:
1536 if add:
1525 with repo.wlock():
1537 with repo.wlock():
1526 m = repo.manifestlog
1538 m = repo.manifestlog
1527 store = m.getstorage(b'')
1539 store = m.getstorage(b'')
1528 for n in add:
1540 for n in add:
1529 try:
1541 try:
1530 manifest = m[store.lookup(n)]
1542 manifest = m[store.lookup(n)]
1531 except error.LookupError as e:
1543 except error.LookupError as e:
1532 raise error.Abort(e, hint="Check your manifest node id")
1544 raise error.Abort(e, hint="Check your manifest node id")
1533 manifest.read() # stores revision in cache too
1545 manifest.read() # stores revision in cache too
1534 return
1546 return
1535
1547
1536 cache = getcache()
1548 cache = getcache()
1537 if not len(cache):
1549 if not len(cache):
1538 ui.write(_('cache empty\n'))
1550 ui.write(_('cache empty\n'))
1539 else:
1551 else:
1540 ui.write(
1552 ui.write(
1541 _('cache contains %d manifest entries, in order of most to '
1553 _('cache contains %d manifest entries, in order of most to '
1542 'least recent:\n') % (len(cache),))
1554 'least recent:\n') % (len(cache),))
1543 totalsize = 0
1555 totalsize = 0
1544 for nodeid in cache:
1556 for nodeid in cache:
1545 # Use cache.peek to not update the LRU order
1557 # Use cache.peek to not update the LRU order
1546 data = cache.peek(nodeid)
1558 data = cache.peek(nodeid)
1547 size = len(data)
1559 size = len(data)
1548 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1560 totalsize += size + 24 # 20 bytes nodeid, 4 bytes size
1549 ui.write(_('id: %s, size %s\n') % (
1561 ui.write(_('id: %s, size %s\n') % (
1550 hex(nodeid), util.bytecount(size)))
1562 hex(nodeid), util.bytecount(size)))
1551 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1563 ondisk = cache._opener.stat('manifestfulltextcache').st_size
1552 ui.write(
1564 ui.write(
1553 _('total cache data size %s, on-disk %s\n') % (
1565 _('total cache data size %s, on-disk %s\n') % (
1554 util.bytecount(totalsize), util.bytecount(ondisk))
1566 util.bytecount(totalsize), util.bytecount(ondisk))
1555 )
1567 )
1556
1568
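# A hedged usage sketch for debugmanifestfulltextcache above; NODE is a
# placeholder for a full manifest node id:
#   hg debugmanifestfulltextcache           # list cached manifest fulltexts
#   hg debugmanifestfulltextcache --clear
#   hg debugmanifestfulltextcache --add NODE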
1557 @command('debugmergestate', [], '')
1569 @command('debugmergestate', [], '')
1558 def debugmergestate(ui, repo, *args):
1570 def debugmergestate(ui, repo, *args):
1559 """print merge state
1571 """print merge state
1560
1572
1561 Use --verbose to print out information about whether v1 or v2 merge state
1573 Use --verbose to print out information about whether v1 or v2 merge state
1562 was chosen."""
1574 was chosen."""
1563 def _hashornull(h):
1575 def _hashornull(h):
1564 if h == nullhex:
1576 if h == nullhex:
1565 return 'null'
1577 return 'null'
1566 else:
1578 else:
1567 return h
1579 return h
1568
1580
1569 def printrecords(version):
1581 def printrecords(version):
1570 ui.write(('* version %d records\n') % version)
1582 ui.write(('* version %d records\n') % version)
1571 if version == 1:
1583 if version == 1:
1572 records = v1records
1584 records = v1records
1573 else:
1585 else:
1574 records = v2records
1586 records = v2records
1575
1587
1576 for rtype, record in records:
1588 for rtype, record in records:
1577 # pretty print some record types
1589 # pretty print some record types
1578 if rtype == 'L':
1590 if rtype == 'L':
1579 ui.write(('local: %s\n') % record)
1591 ui.write(('local: %s\n') % record)
1580 elif rtype == 'O':
1592 elif rtype == 'O':
1581 ui.write(('other: %s\n') % record)
1593 ui.write(('other: %s\n') % record)
1582 elif rtype == 'm':
1594 elif rtype == 'm':
1583 driver, mdstate = record.split('\0', 1)
1595 driver, mdstate = record.split('\0', 1)
1584 ui.write(('merge driver: %s (state "%s")\n')
1596 ui.write(('merge driver: %s (state "%s")\n')
1585 % (driver, mdstate))
1597 % (driver, mdstate))
1586 elif rtype in 'FDC':
1598 elif rtype in 'FDC':
1587 r = record.split('\0')
1599 r = record.split('\0')
1588 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1600 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1589 if version == 1:
1601 if version == 1:
1590 onode = 'not stored in v1 format'
1602 onode = 'not stored in v1 format'
1591 flags = r[7]
1603 flags = r[7]
1592 else:
1604 else:
1593 onode, flags = r[7:9]
1605 onode, flags = r[7:9]
1594 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1606 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1595 % (f, rtype, state, _hashornull(hash)))
1607 % (f, rtype, state, _hashornull(hash)))
1596 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1608 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1597 ui.write((' ancestor path: %s (node %s)\n')
1609 ui.write((' ancestor path: %s (node %s)\n')
1598 % (afile, _hashornull(anode)))
1610 % (afile, _hashornull(anode)))
1599 ui.write((' other path: %s (node %s)\n')
1611 ui.write((' other path: %s (node %s)\n')
1600 % (ofile, _hashornull(onode)))
1612 % (ofile, _hashornull(onode)))
1601 elif rtype == 'f':
1613 elif rtype == 'f':
1602 filename, rawextras = record.split('\0', 1)
1614 filename, rawextras = record.split('\0', 1)
1603 extras = rawextras.split('\0')
1615 extras = rawextras.split('\0')
1604 i = 0
1616 i = 0
1605 extrastrings = []
1617 extrastrings = []
1606 while i < len(extras):
1618 while i < len(extras):
1607 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1619 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1608 i += 2
1620 i += 2
1609
1621
1610 ui.write(('file extras: %s (%s)\n')
1622 ui.write(('file extras: %s (%s)\n')
1611 % (filename, ', '.join(extrastrings)))
1623 % (filename, ', '.join(extrastrings)))
1612 elif rtype == 'l':
1624 elif rtype == 'l':
1613 labels = record.split('\0', 2)
1625 labels = record.split('\0', 2)
1614 labels = [l for l in labels if len(l) > 0]
1626 labels = [l for l in labels if len(l) > 0]
1615 ui.write(('labels:\n'))
1627 ui.write(('labels:\n'))
1616 ui.write((' local: %s\n' % labels[0]))
1628 ui.write((' local: %s\n' % labels[0]))
1617 ui.write((' other: %s\n' % labels[1]))
1629 ui.write((' other: %s\n' % labels[1]))
1618 if len(labels) > 2:
1630 if len(labels) > 2:
1619 ui.write((' base: %s\n' % labels[2]))
1631 ui.write((' base: %s\n' % labels[2]))
1620 else:
1632 else:
1621 ui.write(('unrecognized entry: %s\t%s\n')
1633 ui.write(('unrecognized entry: %s\t%s\n')
1622 % (rtype, record.replace('\0', '\t')))
1634 % (rtype, record.replace('\0', '\t')))
1623
1635
1624 # Avoid mergestate.read() since it may raise an exception for unsupported
1636 # Avoid mergestate.read() since it may raise an exception for unsupported
1625 # merge state records. We shouldn't be doing this, but this is OK since this
1637 # merge state records. We shouldn't be doing this, but this is OK since this
1626 # command is pretty low-level.
1638 # command is pretty low-level.
1627 ms = mergemod.mergestate(repo)
1639 ms = mergemod.mergestate(repo)
1628
1640
1629 # sort so that reasonable information is on top
1641 # sort so that reasonable information is on top
1630 v1records = ms._readrecordsv1()
1642 v1records = ms._readrecordsv1()
1631 v2records = ms._readrecordsv2()
1643 v2records = ms._readrecordsv2()
1632 order = 'LOml'
1644 order = 'LOml'
1633 def key(r):
1645 def key(r):
1634 idx = order.find(r[0])
1646 idx = order.find(r[0])
1635 if idx == -1:
1647 if idx == -1:
1636 return (1, r[1])
1648 return (1, r[1])
1637 else:
1649 else:
1638 return (0, idx)
1650 return (0, idx)
1639 v1records.sort(key=key)
1651 v1records.sort(key=key)
1640 v2records.sort(key=key)
1652 v2records.sort(key=key)
1641
1653
1642 if not v1records and not v2records:
1654 if not v1records and not v2records:
1643 ui.write(('no merge state found\n'))
1655 ui.write(('no merge state found\n'))
1644 elif not v2records:
1656 elif not v2records:
1645 ui.note(('no version 2 merge state\n'))
1657 ui.note(('no version 2 merge state\n'))
1646 printrecords(1)
1658 printrecords(1)
1647 elif ms._v1v2match(v1records, v2records):
1659 elif ms._v1v2match(v1records, v2records):
1648 ui.note(('v1 and v2 states match: using v2\n'))
1660 ui.note(('v1 and v2 states match: using v2\n'))
1649 printrecords(2)
1661 printrecords(2)
1650 else:
1662 else:
1651 ui.note(('v1 and v2 states mismatch: using v1\n'))
1663 ui.note(('v1 and v2 states mismatch: using v1\n'))
1652 printrecords(1)
1664 printrecords(1)
1653 if ui.verbose:
1665 if ui.verbose:
1654 printrecords(2)
1666 printrecords(2)
1655
1667
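# A hedged usage sketch for debugmergestate above; it only reports something
# useful while merge state exists in the working directory:
#   hg debugmergestate -v   # also say whether v1 or v2 records were used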
1656 @command('debugnamecomplete', [], _('NAME...'))
1668 @command('debugnamecomplete', [], _('NAME...'))
1657 def debugnamecomplete(ui, repo, *args):
1669 def debugnamecomplete(ui, repo, *args):
1658 '''complete "names" - tags, open branch names, bookmark names'''
1670 '''complete "names" - tags, open branch names, bookmark names'''
1659
1671
1660 names = set()
1672 names = set()
1661 # since we previously only listed open branches, we will handle that
1673 # since we previously only listed open branches, we will handle that
1662 # specially (after this for loop)
1674 # specially (after this for loop)
1663 for name, ns in repo.names.iteritems():
1675 for name, ns in repo.names.iteritems():
1664 if name != 'branches':
1676 if name != 'branches':
1665 names.update(ns.listnames(repo))
1677 names.update(ns.listnames(repo))
1666 names.update(tag for (tag, heads, tip, closed)
1678 names.update(tag for (tag, heads, tip, closed)
1667 in repo.branchmap().iterbranches() if not closed)
1679 in repo.branchmap().iterbranches() if not closed)
1668 completions = set()
1680 completions = set()
1669 if not args:
1681 if not args:
1670 args = ['']
1682 args = ['']
1671 for a in args:
1683 for a in args:
1672 completions.update(n for n in names if n.startswith(a))
1684 completions.update(n for n in names if n.startswith(a))
1673 ui.write('\n'.join(sorted(completions)))
1685 ui.write('\n'.join(sorted(completions)))
1674 ui.write('\n')
1686 ui.write('\n')
1675
1687
1676 @command('debugobsolete',
1688 @command('debugobsolete',
1677 [('', 'flags', 0, _('markers flag')),
1689 [('', 'flags', 0, _('markers flag')),
1678 ('', 'record-parents', False,
1690 ('', 'record-parents', False,
1679 _('record parent information for the precursor')),
1691 _('record parent information for the precursor')),
1680 ('r', 'rev', [], _('display markers relevant to REV')),
1692 ('r', 'rev', [], _('display markers relevant to REV')),
1681 ('', 'exclusive', False, _('restrict display to markers only '
1693 ('', 'exclusive', False, _('restrict display to markers only '
1682 'relevant to REV')),
1694 'relevant to REV')),
1683 ('', 'index', False, _('display index of the marker')),
1695 ('', 'index', False, _('display index of the marker')),
1684 ('', 'delete', [], _('delete markers specified by indices')),
1696 ('', 'delete', [], _('delete markers specified by indices')),
1685 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1697 ] + cmdutil.commitopts2 + cmdutil.formatteropts,
1686 _('[OBSOLETED [REPLACEMENT ...]]'))
1698 _('[OBSOLETED [REPLACEMENT ...]]'))
1687 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1699 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
1688 """create arbitrary obsolete marker
1700 """create arbitrary obsolete marker
1689
1701
1690 With no arguments, displays the list of obsolescence markers."""
1702 With no arguments, displays the list of obsolescence markers."""
1691
1703
1692 opts = pycompat.byteskwargs(opts)
1704 opts = pycompat.byteskwargs(opts)
1693
1705
1694 def parsenodeid(s):
1706 def parsenodeid(s):
1695 try:
1707 try:
1696 # We do not use revsingle/revrange functions here to accept
1708 # We do not use revsingle/revrange functions here to accept
1697 # arbitrary node identifiers, possibly not present in the
1709 # arbitrary node identifiers, possibly not present in the
1698 # local repository.
1710 # local repository.
1699 n = bin(s)
1711 n = bin(s)
1700 if len(n) != len(nullid):
1712 if len(n) != len(nullid):
1701 raise TypeError()
1713 raise TypeError()
1702 return n
1714 return n
1703 except TypeError:
1715 except TypeError:
1704 raise error.Abort('changeset references must be full hexadecimal '
1716 raise error.Abort('changeset references must be full hexadecimal '
1705 'node identifiers')
1717 'node identifiers')
1706
1718
1707 if opts.get('delete'):
1719 if opts.get('delete'):
1708 indices = []
1720 indices = []
1709 for v in opts.get('delete'):
1721 for v in opts.get('delete'):
1710 try:
1722 try:
1711 indices.append(int(v))
1723 indices.append(int(v))
1712 except ValueError:
1724 except ValueError:
1713 raise error.Abort(_('invalid index value: %r') % v,
1725 raise error.Abort(_('invalid index value: %r') % v,
1714 hint=_('use integers for indices'))
1726 hint=_('use integers for indices'))
1715
1727
1716 if repo.currenttransaction():
1728 if repo.currenttransaction():
1717 raise error.Abort(_('cannot delete obsmarkers in the middle '
1729 raise error.Abort(_('cannot delete obsmarkers in the middle '
1718 'of a transaction.'))
1730 'of a transaction.'))
1719
1731
1720 with repo.lock():
1732 with repo.lock():
1721 n = repair.deleteobsmarkers(repo.obsstore, indices)
1733 n = repair.deleteobsmarkers(repo.obsstore, indices)
1722 ui.write(_('deleted %i obsolescence markers\n') % n)
1734 ui.write(_('deleted %i obsolescence markers\n') % n)
1723
1735
1724 return
1736 return
1725
1737
1726 if precursor is not None:
1738 if precursor is not None:
1727 if opts['rev']:
1739 if opts['rev']:
1728 raise error.Abort('cannot select revision when creating marker')
1740 raise error.Abort('cannot select revision when creating marker')
1729 metadata = {}
1741 metadata = {}
1730 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1742 metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
1731 succs = tuple(parsenodeid(succ) for succ in successors)
1743 succs = tuple(parsenodeid(succ) for succ in successors)
1732 l = repo.lock()
1744 l = repo.lock()
1733 try:
1745 try:
1734 tr = repo.transaction('debugobsolete')
1746 tr = repo.transaction('debugobsolete')
1735 try:
1747 try:
1736 date = opts.get('date')
1748 date = opts.get('date')
1737 if date:
1749 if date:
1738 date = dateutil.parsedate(date)
1750 date = dateutil.parsedate(date)
1739 else:
1751 else:
1740 date = None
1752 date = None
1741 prec = parsenodeid(precursor)
1753 prec = parsenodeid(precursor)
1742 parents = None
1754 parents = None
1743 if opts['record_parents']:
1755 if opts['record_parents']:
1744 if prec not in repo.unfiltered():
1756 if prec not in repo.unfiltered():
1745 raise error.Abort('cannot use --record-parents on '
1757 raise error.Abort('cannot use --record-parents on '
1746 'unknown changesets')
1758 'unknown changesets')
1747 parents = repo.unfiltered()[prec].parents()
1759 parents = repo.unfiltered()[prec].parents()
1748 parents = tuple(p.node() for p in parents)
1760 parents = tuple(p.node() for p in parents)
1749 repo.obsstore.create(tr, prec, succs, opts['flags'],
1761 repo.obsstore.create(tr, prec, succs, opts['flags'],
1750 parents=parents, date=date,
1762 parents=parents, date=date,
1751 metadata=metadata, ui=ui)
1763 metadata=metadata, ui=ui)
1752 tr.close()
1764 tr.close()
1753 except ValueError as exc:
1765 except ValueError as exc:
1754 raise error.Abort(_('bad obsmarker input: %s') %
1766 raise error.Abort(_('bad obsmarker input: %s') %
1755 pycompat.bytestr(exc))
1767 pycompat.bytestr(exc))
1756 finally:
1768 finally:
1757 tr.release()
1769 tr.release()
1758 finally:
1770 finally:
1759 l.release()
1771 l.release()
1760 else:
1772 else:
1761 if opts['rev']:
1773 if opts['rev']:
1762 revs = scmutil.revrange(repo, opts['rev'])
1774 revs = scmutil.revrange(repo, opts['rev'])
1763 nodes = [repo[r].node() for r in revs]
1775 nodes = [repo[r].node() for r in revs]
1764 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1776 markers = list(obsutil.getmarkers(repo, nodes=nodes,
1765 exclusive=opts['exclusive']))
1777 exclusive=opts['exclusive']))
1766 markers.sort(key=lambda x: x._data)
1778 markers.sort(key=lambda x: x._data)
1767 else:
1779 else:
1768 markers = obsutil.getmarkers(repo)
1780 markers = obsutil.getmarkers(repo)
1769
1781
1770 markerstoiter = markers
1782 markerstoiter = markers
1771 isrelevant = lambda m: True
1783 isrelevant = lambda m: True
1772 if opts.get('rev') and opts.get('index'):
1784 if opts.get('rev') and opts.get('index'):
1773 markerstoiter = obsutil.getmarkers(repo)
1785 markerstoiter = obsutil.getmarkers(repo)
1774 markerset = set(markers)
1786 markerset = set(markers)
1775 isrelevant = lambda m: m in markerset
1787 isrelevant = lambda m: m in markerset
1776
1788
1777 fm = ui.formatter('debugobsolete', opts)
1789 fm = ui.formatter('debugobsolete', opts)
1778 for i, m in enumerate(markerstoiter):
1790 for i, m in enumerate(markerstoiter):
1779 if not isrelevant(m):
1791 if not isrelevant(m):
1780 # marker can be irrelevant when we're iterating over a set
1792 # marker can be irrelevant when we're iterating over a set
1781 # of markers (markerstoiter) which is bigger than the set
1793 # of markers (markerstoiter) which is bigger than the set
1782 # of markers we want to display (markers)
1794 # of markers we want to display (markers)
1783 # this can happen if both --index and --rev options are
1795 # this can happen if both --index and --rev options are
1784 # provided and thus we need to iterate over all of the markers
1796 # provided and thus we need to iterate over all of the markers
1785 # to get the correct indices, but only display the ones that
1797 # to get the correct indices, but only display the ones that
1786 # are relevant to --rev value
1798 # are relevant to --rev value
1787 continue
1799 continue
1788 fm.startitem()
1800 fm.startitem()
1789 ind = i if opts.get('index') else None
1801 ind = i if opts.get('index') else None
1790 cmdutil.showmarker(fm, m, index=ind)
1802 cmdutil.showmarker(fm, m, index=ind)
1791 fm.end()
1803 fm.end()
1792
1804
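# Editor's sketch (not part of the original file): the --index/--rev
# handling above boils down to "enumerate the full marker list so the
# printed indices stay stable, but only display the requested subset".
# A minimal standalone model of that loop, with hypothetical data:
def _sketch_showmarkers(allmarkers, wanted, showindex=True):
    wantedset = set(wanted)             # plays the role of `markers`
    for i, m in enumerate(allmarkers):  # plays the role of `markerstoiter`
        if m not in wantedset:
            continue
        prefix = ('%d ' % i) if showindex else ''
        print('%s%s' % (prefix, m))

# _sketch_showmarkers(['m0', 'm1', 'm2', 'm3'], ['m1', 'm3'])
# prints "1 m1" and "3 m3": the indices refer to the full list, which is
# exactly why the code above iterates over every marker when both
# options are given.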
1793 @command('debugp1copies',
1805 @command('debugp1copies',
1794 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1806 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1795 _('[-r REV]'))
1807 _('[-r REV]'))
1796 def debugp1copies(ui, repo, **opts):
1808 def debugp1copies(ui, repo, **opts):
1797 """dump copy information compared to p1"""
1809 """dump copy information compared to p1"""
1798
1810
1799 opts = pycompat.byteskwargs(opts)
1811 opts = pycompat.byteskwargs(opts)
1800 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1812 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1801 for dst, src in ctx.p1copies().items():
1813 for dst, src in ctx.p1copies().items():
1802 ui.write('%s -> %s\n' % (src, dst))
1814 ui.write('%s -> %s\n' % (src, dst))
1803
1815
1804 @command('debugp2copies',
1816 @command('debugp2copies',
1805 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1817 [('r', 'rev', '', _('revision to debug'), _('REV'))],
1806 _('[-r REV]'))
1818 _('[-r REV]'))
1807 def debugp2copies(ui, repo, **opts):
1819 def debugp2copies(ui, repo, **opts):
1808 """dump copy information compared to p2"""
1820 """dump copy information compared to p2"""
1809
1821
1810 opts = pycompat.byteskwargs(opts)
1822 opts = pycompat.byteskwargs(opts)
1811 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1823 ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
1812 for dst, src in ctx.p2copies().items():
1824 for dst, src in ctx.p2copies().items():
1813 ui.write('%s -> %s\n' % (src, dst))
1825 ui.write('%s -> %s\n' % (src, dst))
1814
1826
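# Editor's note (hypothetical example, not in the original): both
# debugp1copies and debugp2copies print one "source -> destination" line
# per copy recorded against that parent, e.g.:
#   old-name.txt -> new-name.txt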
1815 @command('debugpathcomplete',
1827 @command('debugpathcomplete',
1816 [('f', 'full', None, _('complete an entire path')),
1828 [('f', 'full', None, _('complete an entire path')),
1817 ('n', 'normal', None, _('show only normal files')),
1829 ('n', 'normal', None, _('show only normal files')),
1818 ('a', 'added', None, _('show only added files')),
1830 ('a', 'added', None, _('show only added files')),
1819 ('r', 'removed', None, _('show only removed files'))],
1831 ('r', 'removed', None, _('show only removed files'))],
1820 _('FILESPEC...'))
1832 _('FILESPEC...'))
1821 def debugpathcomplete(ui, repo, *specs, **opts):
1833 def debugpathcomplete(ui, repo, *specs, **opts):
1822 '''complete part or all of a tracked path
1834 '''complete part or all of a tracked path
1823
1835
1824 This command supports shells that offer path name completion. It
1836 This command supports shells that offer path name completion. It
1825 currently completes only files already known to the dirstate.
1837 currently completes only files already known to the dirstate.
1826
1838
1827 Completion extends only to the next path segment unless
1839 Completion extends only to the next path segment unless
1828 --full is specified, in which case entire paths are used.'''
1840 --full is specified, in which case entire paths are used.'''
1829
1841
1830 def complete(path, acceptable):
1842 def complete(path, acceptable):
1831 dirstate = repo.dirstate
1843 dirstate = repo.dirstate
1832 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1844 spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
1833 rootdir = repo.root + pycompat.ossep
1845 rootdir = repo.root + pycompat.ossep
1834 if spec != repo.root and not spec.startswith(rootdir):
1846 if spec != repo.root and not spec.startswith(rootdir):
1835 return [], []
1847 return [], []
1836 if os.path.isdir(spec):
1848 if os.path.isdir(spec):
1837 spec += '/'
1849 spec += '/'
1838 spec = spec[len(rootdir):]
1850 spec = spec[len(rootdir):]
1839 fixpaths = pycompat.ossep != '/'
1851 fixpaths = pycompat.ossep != '/'
1840 if fixpaths:
1852 if fixpaths:
1841 spec = spec.replace(pycompat.ossep, '/')
1853 spec = spec.replace(pycompat.ossep, '/')
1842 speclen = len(spec)
1854 speclen = len(spec)
1843 fullpaths = opts[r'full']
1855 fullpaths = opts[r'full']
1844 files, dirs = set(), set()
1856 files, dirs = set(), set()
1845 adddir, addfile = dirs.add, files.add
1857 adddir, addfile = dirs.add, files.add
1846 for f, st in dirstate.iteritems():
1858 for f, st in dirstate.iteritems():
1847 if f.startswith(spec) and st[0] in acceptable:
1859 if f.startswith(spec) and st[0] in acceptable:
1848 if fixpaths:
1860 if fixpaths:
1849 f = f.replace('/', pycompat.ossep)
1861 f = f.replace('/', pycompat.ossep)
1850 if fullpaths:
1862 if fullpaths:
1851 addfile(f)
1863 addfile(f)
1852 continue
1864 continue
1853 s = f.find(pycompat.ossep, speclen)
1865 s = f.find(pycompat.ossep, speclen)
1854 if s >= 0:
1866 if s >= 0:
1855 adddir(f[:s])
1867 adddir(f[:s])
1856 else:
1868 else:
1857 addfile(f)
1869 addfile(f)
1858 return files, dirs
1870 return files, dirs
1859
1871
1860 acceptable = ''
1872 acceptable = ''
1861 if opts[r'normal']:
1873 if opts[r'normal']:
1862 acceptable += 'nm'
1874 acceptable += 'nm'
1863 if opts[r'added']:
1875 if opts[r'added']:
1864 acceptable += 'a'
1876 acceptable += 'a'
1865 if opts[r'removed']:
1877 if opts[r'removed']:
1866 acceptable += 'r'
1878 acceptable += 'r'
1867 cwd = repo.getcwd()
1879 cwd = repo.getcwd()
1868 if not specs:
1880 if not specs:
1869 specs = ['.']
1881 specs = ['.']
1870
1882
1871 files, dirs = set(), set()
1883 files, dirs = set(), set()
1872 for spec in specs:
1884 for spec in specs:
1873 f, d = complete(spec, acceptable or 'nmar')
1885 f, d = complete(spec, acceptable or 'nmar')
1874 files.update(f)
1886 files.update(f)
1875 dirs.update(d)
1887 dirs.update(d)
1876 files.update(dirs)
1888 files.update(dirs)
1877 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1889 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
1878 ui.write('\n')
1890 ui.write('\n')
1879
1891
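# Editor's sketch (standalone, not part of the original file): the heart
# of complete() above is "match tracked paths on a prefix, then cut each
# hit at the next path separator unless --full is given". A minimal model
# over plain strings:
def _sketch_complete(tracked, spec, sep='/', full=False):
    files, dirs = set(), set()
    speclen = len(spec)
    for f in tracked:
        if not f.startswith(spec):
            continue
        if full:
            files.add(f)
            continue
        s = f.find(sep, speclen)
        if s >= 0:
            dirs.add(f[:s])   # stop at the next path segment
        else:
            files.add(f)
    return files, dirs

# _sketch_complete(['a/b/c', 'a/b/d', 'a/x'], 'a/') -> ({'a/x'}, {'a/b'})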
1880 @command('debugpathcopies',
1892 @command('debugpathcopies',
1881 cmdutil.walkopts,
1893 cmdutil.walkopts,
1882 'hg debugpathcopies REV1 REV2 [FILE]',
1894 'hg debugpathcopies REV1 REV2 [FILE]',
1883 inferrepo=True)
1895 inferrepo=True)
1884 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1896 def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
1885 """show copies between two revisions"""
1897 """show copies between two revisions"""
1886 ctx1 = scmutil.revsingle(repo, rev1)
1898 ctx1 = scmutil.revsingle(repo, rev1)
1887 ctx2 = scmutil.revsingle(repo, rev2)
1899 ctx2 = scmutil.revsingle(repo, rev2)
1888 m = scmutil.match(ctx1, pats, opts)
1900 m = scmutil.match(ctx1, pats, opts)
1889 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1901 for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
1890 ui.write('%s -> %s\n' % (src, dst))
1902 ui.write('%s -> %s\n' % (src, dst))
1891
1903
1892 @command('debugpeer', [], _('PATH'), norepo=True)
1904 @command('debugpeer', [], _('PATH'), norepo=True)
1893 def debugpeer(ui, path):
1905 def debugpeer(ui, path):
1894 """establish a connection to a peer repository"""
1906 """establish a connection to a peer repository"""
1895 # Always enable peer request logging. Requires --debug to display
1907 # Always enable peer request logging. Requires --debug to display
1896 # though.
1908 # though.
1897 overrides = {
1909 overrides = {
1898 ('devel', 'debug.peer-request'): True,
1910 ('devel', 'debug.peer-request'): True,
1899 }
1911 }
1900
1912
1901 with ui.configoverride(overrides):
1913 with ui.configoverride(overrides):
1902 peer = hg.peer(ui, {}, path)
1914 peer = hg.peer(ui, {}, path)
1903
1915
1904 local = peer.local() is not None
1916 local = peer.local() is not None
1905 canpush = peer.canpush()
1917 canpush = peer.canpush()
1906
1918
1907 ui.write(_('url: %s\n') % peer.url())
1919 ui.write(_('url: %s\n') % peer.url())
1908 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1920 ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
1909 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1921 ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1910
1922
1911 @command('debugpickmergetool',
1923 @command('debugpickmergetool',
1912 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1924 [('r', 'rev', '', _('check for files in this revision'), _('REV')),
1913 ('', 'changedelete', None, _('emulate merging change and delete')),
1925 ('', 'changedelete', None, _('emulate merging change and delete')),
1914 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1926 ] + cmdutil.walkopts + cmdutil.mergetoolopts,
1915 _('[PATTERN]...'),
1927 _('[PATTERN]...'),
1916 inferrepo=True)
1928 inferrepo=True)
1917 def debugpickmergetool(ui, repo, *pats, **opts):
1929 def debugpickmergetool(ui, repo, *pats, **opts):
1918 """examine which merge tool is chosen for specified file
1930 """examine which merge tool is chosen for specified file
1919
1931
1920 As described in :hg:`help merge-tools`, Mercurial examines
1932 As described in :hg:`help merge-tools`, Mercurial examines
1921 the configurations below, in this order, to decide which merge tool is
1933 the configurations below, in this order, to decide which merge tool is
1922 chosen for the specified file.
1934 chosen for the specified file.
1923
1935
1924 1. ``--tool`` option
1936 1. ``--tool`` option
1925 2. ``HGMERGE`` environment variable
1937 2. ``HGMERGE`` environment variable
1926 3. configurations in ``merge-patterns`` section
1938 3. configurations in ``merge-patterns`` section
1927 4. configuration of ``ui.merge``
1939 4. configuration of ``ui.merge``
1928 5. configurations in ``merge-tools`` section
1940 5. configurations in ``merge-tools`` section
1929 6. ``hgmerge`` tool (for historical reasons only)
1941 6. ``hgmerge`` tool (for historical reasons only)
1930 7. default tool for fallback (``:merge`` or ``:prompt``)
1942 7. default tool for fallback (``:merge`` or ``:prompt``)
1931
1943
1932 This command writes out the examination result in the style below::
1944 This command writes out the examination result in the style below::
1933
1945
1934 FILE = MERGETOOL
1946 FILE = MERGETOOL
1935
1947
1936 By default, all files known in the first parent context of the
1948 By default, all files known in the first parent context of the
1937 working directory are examined. Use file patterns and/or -I/-X
1949 working directory are examined. Use file patterns and/or -I/-X
1938 options to limit the target files. -r/--rev is also useful to examine
1950 options to limit the target files. -r/--rev is also useful to examine
1939 files in another context without actually updating to it.
1951 files in another context without actually updating to it.
1940
1952
1941 With --debug, this command shows warning messages while matching
1953 With --debug, this command shows warning messages while matching
1942 against ``merge-patterns`` and related settings. It is recommended to
1954 against ``merge-patterns`` and related settings. It is recommended to
1943 use this option with explicit file patterns and/or -I/-X options,
1955 use this option with explicit file patterns and/or -I/-X options,
1944 because this option increases the amount of output per file according
1956 because this option increases the amount of output per file according
1945 to the configuration in hgrc.
1957 to the configuration in hgrc.
1946
1958
1947 With -v/--verbose, this command first shows the configurations below
1959 With -v/--verbose, this command first shows the configurations below
1948 (only if they are specified).
1960 (only if they are specified).
1949
1961
1950 - ``--tool`` option
1962 - ``--tool`` option
1951 - ``HGMERGE`` environment variable
1963 - ``HGMERGE`` environment variable
1952 - configuration of ``ui.merge``
1964 - configuration of ``ui.merge``
1953
1965
1954 If a merge tool is chosen before matching against
1966 If a merge tool is chosen before matching against
1955 ``merge-patterns``, this command can't show any helpful
1967 ``merge-patterns``, this command can't show any helpful
1956 information, even with --debug. In such a case, the information
1968 information, even with --debug. In such a case, the information
1957 above is useful for understanding why a merge tool was chosen.
1969 above is useful for understanding why a merge tool was chosen.
1958 """
1970 """
1959 opts = pycompat.byteskwargs(opts)
1971 opts = pycompat.byteskwargs(opts)
1960 overrides = {}
1972 overrides = {}
1961 if opts['tool']:
1973 if opts['tool']:
1962 overrides[('ui', 'forcemerge')] = opts['tool']
1974 overrides[('ui', 'forcemerge')] = opts['tool']
1963 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1975 ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))
1964
1976
1965 with ui.configoverride(overrides, 'debugmergepatterns'):
1977 with ui.configoverride(overrides, 'debugmergepatterns'):
1966 hgmerge = encoding.environ.get("HGMERGE")
1978 hgmerge = encoding.environ.get("HGMERGE")
1967 if hgmerge is not None:
1979 if hgmerge is not None:
1968 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1980 ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
1969 uimerge = ui.config("ui", "merge")
1981 uimerge = ui.config("ui", "merge")
1970 if uimerge:
1982 if uimerge:
1971 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1983 ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))
1972
1984
1973 ctx = scmutil.revsingle(repo, opts.get('rev'))
1985 ctx = scmutil.revsingle(repo, opts.get('rev'))
1974 m = scmutil.match(ctx, pats, opts)
1986 m = scmutil.match(ctx, pats, opts)
1975 changedelete = opts['changedelete']
1987 changedelete = opts['changedelete']
1976 for path in ctx.walk(m):
1988 for path in ctx.walk(m):
1977 fctx = ctx[path]
1989 fctx = ctx[path]
1978 try:
1990 try:
1979 if not ui.debugflag:
1991 if not ui.debugflag:
1980 ui.pushbuffer(error=True)
1992 ui.pushbuffer(error=True)
1981 tool, toolpath = filemerge._picktool(repo, ui, path,
1993 tool, toolpath = filemerge._picktool(repo, ui, path,
1982 fctx.isbinary(),
1994 fctx.isbinary(),
1983 'l' in fctx.flags(),
1995 'l' in fctx.flags(),
1984 changedelete)
1996 changedelete)
1985 finally:
1997 finally:
1986 if not ui.debugflag:
1998 if not ui.debugflag:
1987 ui.popbuffer()
1999 ui.popbuffer()
1988 ui.write(('%s = %s\n') % (path, tool))
2000 ui.write(('%s = %s\n') % (path, tool))
1989
2001
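# Editor's sketch (simplified, not the real filemerge._picktool): the
# precedence list in the docstring above amounts to "first configured
# source wins". All parameter names below are hypothetical stand-ins for
# the seven sources; the real code also weighs binary files, symlinks
# and change/delete conflicts:
def _sketch_picktool(tool_opt=None, hgmerge_env=None, pattern_match=None,
                     ui_merge=None, merge_tools_pick=None,
                     hgmerge_in_path=None):
    for candidate in (tool_opt, hgmerge_env, pattern_match, ui_merge,
                      merge_tools_pick, hgmerge_in_path):
        if candidate:
            return candidate
    return ':merge'   # or ':prompt' -- step 7, the fallback

# _sketch_picktool(hgmerge_env='vimdiff', ui_merge='meld') -> 'vimdiff',
# because HGMERGE (step 2) outranks ui.merge (step 4).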
1990 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2002 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
1991 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2003 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
1992 '''access the pushkey key/value protocol
2004 '''access the pushkey key/value protocol
1993
2005
1994 With two args, list the keys in the given namespace.
2006 With two args, list the keys in the given namespace.
1995
2007
1996 With five args, set a key to new if it currently is set to old.
2008 With five args, set a key to new if it currently is set to old.
1997 Reports success or failure.
2009 Reports success or failure.
1998 '''
2010 '''
1999
2011
2000 target = hg.peer(ui, {}, repopath)
2012 target = hg.peer(ui, {}, repopath)
2001 if keyinfo:
2013 if keyinfo:
2002 key, old, new = keyinfo
2014 key, old, new = keyinfo
2003 with target.commandexecutor() as e:
2015 with target.commandexecutor() as e:
2004 r = e.callcommand('pushkey', {
2016 r = e.callcommand('pushkey', {
2005 'namespace': namespace,
2017 'namespace': namespace,
2006 'key': key,
2018 'key': key,
2007 'old': old,
2019 'old': old,
2008 'new': new,
2020 'new': new,
2009 }).result()
2021 }).result()
2010
2022
2011 ui.status(pycompat.bytestr(r) + '\n')
2023 ui.status(pycompat.bytestr(r) + '\n')
2012 return not r
2024 return not r
2013 else:
2025 else:
2014 for k, v in sorted(target.listkeys(namespace).iteritems()):
2026 for k, v in sorted(target.listkeys(namespace).iteritems()):
2015 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2027 ui.write("%s\t%s\n" % (stringutil.escapestr(k),
2016 stringutil.escapestr(v)))
2028 stringutil.escapestr(v)))
2017
2029
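# Editor's sketch (standalone model, not the wire protocol): the
# five-argument form above is a compare-and-swap -- the key only moves
# from `old` to `new` if it currently holds `old`:
def _sketch_pushkey(namespace, key, old, new):
    if namespace.get(key, '') != old:
        return False              # stale `old` value: report failure
    namespace[key] = new
    return True

# ns = {'somekey': 'aaaa'}                        # hypothetical namespace
# _sketch_pushkey(ns, 'somekey', 'aaaa', 'bbbb')  -> True
# _sketch_pushkey(ns, 'somekey', 'aaaa', 'cccc')  -> False (now holds 'bbbb')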
2018 @command('debugpvec', [], _('A B'))
2030 @command('debugpvec', [], _('A B'))
2019 def debugpvec(ui, repo, a, b=None):
2031 def debugpvec(ui, repo, a, b=None):
2020 ca = scmutil.revsingle(repo, a)
2032 ca = scmutil.revsingle(repo, a)
2021 cb = scmutil.revsingle(repo, b)
2033 cb = scmutil.revsingle(repo, b)
2022 pa = pvec.ctxpvec(ca)
2034 pa = pvec.ctxpvec(ca)
2023 pb = pvec.ctxpvec(cb)
2035 pb = pvec.ctxpvec(cb)
2024 if pa == pb:
2036 if pa == pb:
2025 rel = "="
2037 rel = "="
2026 elif pa > pb:
2038 elif pa > pb:
2027 rel = ">"
2039 rel = ">"
2028 elif pa < pb:
2040 elif pa < pb:
2029 rel = "<"
2041 rel = "<"
2030 elif pa | pb:
2042 elif pa | pb:
2031 rel = "|"
2043 rel = "|"
2032 ui.write(_("a: %s\n") % pa)
2044 ui.write(_("a: %s\n") % pa)
2033 ui.write(_("b: %s\n") % pb)
2045 ui.write(_("b: %s\n") % pb)
2034 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2046 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2035 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2047 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2036 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2048 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2037 pa.distance(pb), rel))
2049 pa.distance(pb), rel))
2038
2050
2039 @command('debugrebuilddirstate|debugrebuildstate',
2051 @command('debugrebuilddirstate|debugrebuildstate',
2040 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2052 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2041 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2053 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2042 'the working copy parent')),
2054 'the working copy parent')),
2043 ],
2055 ],
2044 _('[-r REV]'))
2056 _('[-r REV]'))
2045 def debugrebuilddirstate(ui, repo, rev, **opts):
2057 def debugrebuilddirstate(ui, repo, rev, **opts):
2046 """rebuild the dirstate as it would look like for the given revision
2058 """rebuild the dirstate as it would look like for the given revision
2047
2059
2048 If no revision is specified the first current parent will be used.
2060 If no revision is specified the first current parent will be used.
2049
2061
2050 The dirstate will be set to the files of the given revision.
2062 The dirstate will be set to the files of the given revision.
2051 The actual working directory content or existing dirstate
2063 The actual working directory content or existing dirstate
2052 information such as adds or removes is not considered.
2064 information such as adds or removes is not considered.
2053
2065
2054 ``minimal`` will only rebuild the dirstate status for files that claim to be
2066 ``minimal`` will only rebuild the dirstate status for files that claim to be
2055 tracked but are not in the parent manifest, or that exist in the parent
2067 tracked but are not in the parent manifest, or that exist in the parent
2056 manifest but are not in the dirstate. It will not change adds, removes, or
2068 manifest but are not in the dirstate. It will not change adds, removes, or
2057 modified files that are in the working copy parent.
2069 modified files that are in the working copy parent.
2058
2070
2059 One use of this command is to make the next :hg:`status` invocation
2071 One use of this command is to make the next :hg:`status` invocation
2060 check the actual file content.
2072 check the actual file content.
2061 """
2073 """
2062 ctx = scmutil.revsingle(repo, rev)
2074 ctx = scmutil.revsingle(repo, rev)
2063 with repo.wlock():
2075 with repo.wlock():
2064 dirstate = repo.dirstate
2076 dirstate = repo.dirstate
2065 changedfiles = None
2077 changedfiles = None
2066 # See command doc for what minimal does.
2078 # See command doc for what minimal does.
2067 if opts.get(r'minimal'):
2079 if opts.get(r'minimal'):
2068 manifestfiles = set(ctx.manifest().keys())
2080 manifestfiles = set(ctx.manifest().keys())
2069 dirstatefiles = set(dirstate)
2081 dirstatefiles = set(dirstate)
2070 manifestonly = manifestfiles - dirstatefiles
2082 manifestonly = manifestfiles - dirstatefiles
2071 dsonly = dirstatefiles - manifestfiles
2083 dsonly = dirstatefiles - manifestfiles
2072 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2084 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2073 changedfiles = manifestonly | dsnotadded
2085 changedfiles = manifestonly | dsnotadded
2074
2086
2075 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2087 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2076
2088
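# Editor's sketch (standalone, hypothetical file names): with --minimal,
# the set handed to dirstate.rebuild() is "in the manifest but not the
# dirstate" plus "in the dirstate but neither added nor in the manifest":
manifestfiles = {'a.txt', 'b.txt', 'c.txt'}
dirstatefiles = {'b.txt', 'c.txt', 'new.txt', 'stale.txt'}
added = {'new.txt'}                                    # dirstate state 'a'
manifestonly = manifestfiles - dirstatefiles           # {'a.txt'}
dsnotadded = (dirstatefiles - manifestfiles) - added   # {'stale.txt'}
changedfiles = manifestonly | dsnotadded               # {'a.txt', 'stale.txt'}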
2077 @command('debugrebuildfncache', [], '')
2089 @command('debugrebuildfncache', [], '')
2078 def debugrebuildfncache(ui, repo):
2090 def debugrebuildfncache(ui, repo):
2079 """rebuild the fncache file"""
2091 """rebuild the fncache file"""
2080 repair.rebuildfncache(ui, repo)
2092 repair.rebuildfncache(ui, repo)
2081
2093
2082 @command('debugrename',
2094 @command('debugrename',
2083 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2095 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2084 _('[-r REV] [FILE]...'))
2096 _('[-r REV] [FILE]...'))
2085 def debugrename(ui, repo, *pats, **opts):
2097 def debugrename(ui, repo, *pats, **opts):
2086 """dump rename information"""
2098 """dump rename information"""
2087
2099
2088 opts = pycompat.byteskwargs(opts)
2100 opts = pycompat.byteskwargs(opts)
2089 ctx = scmutil.revsingle(repo, opts.get('rev'))
2101 ctx = scmutil.revsingle(repo, opts.get('rev'))
2090 m = scmutil.match(ctx, pats, opts)
2102 m = scmutil.match(ctx, pats, opts)
2091 for abs in ctx.walk(m):
2103 for abs in ctx.walk(m):
2092 fctx = ctx[abs]
2104 fctx = ctx[abs]
2093 o = fctx.filelog().renamed(fctx.filenode())
2105 o = fctx.filelog().renamed(fctx.filenode())
2094 rel = repo.pathto(abs)
2106 rel = repo.pathto(abs)
2095 if o:
2107 if o:
2096 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2108 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2097 else:
2109 else:
2098 ui.write(_("%s not renamed\n") % rel)
2110 ui.write(_("%s not renamed\n") % rel)
2099
2111
2100 @command('debugrevlog', cmdutil.debugrevlogopts +
2112 @command('debugrevlog', cmdutil.debugrevlogopts +
2101 [('d', 'dump', False, _('dump index data'))],
2113 [('d', 'dump', False, _('dump index data'))],
2102 _('-c|-m|FILE'),
2114 _('-c|-m|FILE'),
2103 optionalrepo=True)
2115 optionalrepo=True)
2104 def debugrevlog(ui, repo, file_=None, **opts):
2116 def debugrevlog(ui, repo, file_=None, **opts):
2105 """show data and statistics about a revlog"""
2117 """show data and statistics about a revlog"""
2106 opts = pycompat.byteskwargs(opts)
2118 opts = pycompat.byteskwargs(opts)
2107 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2119 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2108
2120
2109 if opts.get("dump"):
2121 if opts.get("dump"):
2110 numrevs = len(r)
2122 numrevs = len(r)
2111 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2123 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2112 " rawsize totalsize compression heads chainlen\n"))
2124 " rawsize totalsize compression heads chainlen\n"))
2113 ts = 0
2125 ts = 0
2114 heads = set()
2126 heads = set()
2115
2127
2116 for rev in pycompat.xrange(numrevs):
2128 for rev in pycompat.xrange(numrevs):
2117 dbase = r.deltaparent(rev)
2129 dbase = r.deltaparent(rev)
2118 if dbase == -1:
2130 if dbase == -1:
2119 dbase = rev
2131 dbase = rev
2120 cbase = r.chainbase(rev)
2132 cbase = r.chainbase(rev)
2121 clen = r.chainlen(rev)
2133 clen = r.chainlen(rev)
2122 p1, p2 = r.parentrevs(rev)
2134 p1, p2 = r.parentrevs(rev)
2123 rs = r.rawsize(rev)
2135 rs = r.rawsize(rev)
2124 ts = ts + rs
2136 ts = ts + rs
2125 heads -= set(r.parentrevs(rev))
2137 heads -= set(r.parentrevs(rev))
2126 heads.add(rev)
2138 heads.add(rev)
2127 try:
2139 try:
2128 compression = ts / r.end(rev)
2140 compression = ts / r.end(rev)
2129 except ZeroDivisionError:
2141 except ZeroDivisionError:
2130 compression = 0
2142 compression = 0
2131 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2143 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2132 "%11d %5d %8d\n" %
2144 "%11d %5d %8d\n" %
2133 (rev, p1, p2, r.start(rev), r.end(rev),
2145 (rev, p1, p2, r.start(rev), r.end(rev),
2134 r.start(dbase), r.start(cbase),
2146 r.start(dbase), r.start(cbase),
2135 r.start(p1), r.start(p2),
2147 r.start(p1), r.start(p2),
2136 rs, ts, compression, len(heads), clen))
2148 rs, ts, compression, len(heads), clen))
2137 return 0
2149 return 0
2138
2150
2139 v = r.version
2151 v = r.version
2140 format = v & 0xFFFF
2152 format = v & 0xFFFF
2141 flags = []
2153 flags = []
2142 gdelta = False
2154 gdelta = False
2143 if v & revlog.FLAG_INLINE_DATA:
2155 if v & revlog.FLAG_INLINE_DATA:
2144 flags.append('inline')
2156 flags.append('inline')
2145 if v & revlog.FLAG_GENERALDELTA:
2157 if v & revlog.FLAG_GENERALDELTA:
2146 gdelta = True
2158 gdelta = True
2147 flags.append('generaldelta')
2159 flags.append('generaldelta')
2148 if not flags:
2160 if not flags:
2149 flags = ['(none)']
2161 flags = ['(none)']
2150
2162
2151 ### tracks merge vs single parent
2163 ### tracks merge vs single parent
2152 nummerges = 0
2164 nummerges = 0
2153
2165
2154 ### tracks the ways the "delta" is built
2166 ### tracks the ways the "delta" is built
2155 # nodelta
2167 # nodelta
2156 numempty = 0
2168 numempty = 0
2157 numemptytext = 0
2169 numemptytext = 0
2158 numemptydelta = 0
2170 numemptydelta = 0
2159 # full file content
2171 # full file content
2160 numfull = 0
2172 numfull = 0
2161 # intermediate snapshot against a prior snapshot
2173 # intermediate snapshot against a prior snapshot
2162 numsemi = 0
2174 numsemi = 0
2163 # snapshot count per depth
2175 # snapshot count per depth
2164 numsnapdepth = collections.defaultdict(lambda: 0)
2176 numsnapdepth = collections.defaultdict(lambda: 0)
2165 # delta against previous revision
2177 # delta against previous revision
2166 numprev = 0
2178 numprev = 0
2167 # delta against first or second parent (not prev)
2179 # delta against first or second parent (not prev)
2168 nump1 = 0
2180 nump1 = 0
2169 nump2 = 0
2181 nump2 = 0
2170 # delta against neither prev nor parents
2182 # delta against neither prev nor parents
2171 numother = 0
2183 numother = 0
2172 # delta against prev that are also first or second parent
2184 # delta against prev that are also first or second parent
2173 # (details of `numprev`)
2185 # (details of `numprev`)
2174 nump1prev = 0
2186 nump1prev = 0
2175 nump2prev = 0
2187 nump2prev = 0
2176
2188
2177 # data about the delta chain of each rev
2189 # data about the delta chain of each rev
2178 chainlengths = []
2190 chainlengths = []
2179 chainbases = []
2191 chainbases = []
2180 chainspans = []
2192 chainspans = []
2181
2193
2182 # data about each revision
2194 # data about each revision
2183 datasize = [None, 0, 0]
2195 datasize = [None, 0, 0]
2184 fullsize = [None, 0, 0]
2196 fullsize = [None, 0, 0]
2185 semisize = [None, 0, 0]
2197 semisize = [None, 0, 0]
2186 # snapshot count per depth
2198 # snapshot count per depth
2187 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2199 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2188 deltasize = [None, 0, 0]
2200 deltasize = [None, 0, 0]
2189 chunktypecounts = {}
2201 chunktypecounts = {}
2190 chunktypesizes = {}
2202 chunktypesizes = {}
2191
2203
2192 def addsize(size, l):
2204 def addsize(size, l):
2193 if l[0] is None or size < l[0]:
2205 if l[0] is None or size < l[0]:
2194 l[0] = size
2206 l[0] = size
2195 if size > l[1]:
2207 if size > l[1]:
2196 l[1] = size
2208 l[1] = size
2197 l[2] += size
2209 l[2] += size
2198
2210
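# Editor's note (sketch): addsize() keeps running [min, max, total]
# statistics in a three-element list. Feeding sizes 10, 3 and 7 into a
# fresh [None, 0, 0] leaves [3, 10, 20]; the "avg" columns printed later
# are total / count (e.g. datasize[2] /= numrevs below).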
2199 numrevs = len(r)
2211 numrevs = len(r)
2200 for rev in pycompat.xrange(numrevs):
2212 for rev in pycompat.xrange(numrevs):
2201 p1, p2 = r.parentrevs(rev)
2213 p1, p2 = r.parentrevs(rev)
2202 delta = r.deltaparent(rev)
2214 delta = r.deltaparent(rev)
2203 if format > 0:
2215 if format > 0:
2204 addsize(r.rawsize(rev), datasize)
2216 addsize(r.rawsize(rev), datasize)
2205 if p2 != nullrev:
2217 if p2 != nullrev:
2206 nummerges += 1
2218 nummerges += 1
2207 size = r.length(rev)
2219 size = r.length(rev)
2208 if delta == nullrev:
2220 if delta == nullrev:
2209 chainlengths.append(0)
2221 chainlengths.append(0)
2210 chainbases.append(r.start(rev))
2222 chainbases.append(r.start(rev))
2211 chainspans.append(size)
2223 chainspans.append(size)
2212 if size == 0:
2224 if size == 0:
2213 numempty += 1
2225 numempty += 1
2214 numemptytext += 1
2226 numemptytext += 1
2215 else:
2227 else:
2216 numfull += 1
2228 numfull += 1
2217 numsnapdepth[0] += 1
2229 numsnapdepth[0] += 1
2218 addsize(size, fullsize)
2230 addsize(size, fullsize)
2219 addsize(size, snapsizedepth[0])
2231 addsize(size, snapsizedepth[0])
2220 else:
2232 else:
2221 chainlengths.append(chainlengths[delta] + 1)
2233 chainlengths.append(chainlengths[delta] + 1)
2222 baseaddr = chainbases[delta]
2234 baseaddr = chainbases[delta]
2223 revaddr = r.start(rev)
2235 revaddr = r.start(rev)
2224 chainbases.append(baseaddr)
2236 chainbases.append(baseaddr)
2225 chainspans.append((revaddr - baseaddr) + size)
2237 chainspans.append((revaddr - baseaddr) + size)
2226 if size == 0:
2238 if size == 0:
2227 numempty += 1
2239 numempty += 1
2228 numemptydelta += 1
2240 numemptydelta += 1
2229 elif r.issnapshot(rev):
2241 elif r.issnapshot(rev):
2230 addsize(size, semisize)
2242 addsize(size, semisize)
2231 numsemi += 1
2243 numsemi += 1
2232 depth = r.snapshotdepth(rev)
2244 depth = r.snapshotdepth(rev)
2233 numsnapdepth[depth] += 1
2245 numsnapdepth[depth] += 1
2234 addsize(size, snapsizedepth[depth])
2246 addsize(size, snapsizedepth[depth])
2235 else:
2247 else:
2236 addsize(size, deltasize)
2248 addsize(size, deltasize)
2237 if delta == rev - 1:
2249 if delta == rev - 1:
2238 numprev += 1
2250 numprev += 1
2239 if delta == p1:
2251 if delta == p1:
2240 nump1prev += 1
2252 nump1prev += 1
2241 elif delta == p2:
2253 elif delta == p2:
2242 nump2prev += 1
2254 nump2prev += 1
2243 elif delta == p1:
2255 elif delta == p1:
2244 nump1 += 1
2256 nump1 += 1
2245 elif delta == p2:
2257 elif delta == p2:
2246 nump2 += 1
2258 nump2 += 1
2247 elif delta != nullrev:
2259 elif delta != nullrev:
2248 numother += 1
2260 numother += 1
2249
2261
2250 # Obtain data on the raw chunks in the revlog.
2262 # Obtain data on the raw chunks in the revlog.
2251 if util.safehasattr(r, '_getsegmentforrevs'):
2263 if util.safehasattr(r, '_getsegmentforrevs'):
2252 segment = r._getsegmentforrevs(rev, rev)[1]
2264 segment = r._getsegmentforrevs(rev, rev)[1]
2253 else:
2265 else:
2254 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2266 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2255 if segment:
2267 if segment:
2256 chunktype = bytes(segment[0:1])
2268 chunktype = bytes(segment[0:1])
2257 else:
2269 else:
2258 chunktype = 'empty'
2270 chunktype = 'empty'
2259
2271
2260 if chunktype not in chunktypecounts:
2272 if chunktype not in chunktypecounts:
2261 chunktypecounts[chunktype] = 0
2273 chunktypecounts[chunktype] = 0
2262 chunktypesizes[chunktype] = 0
2274 chunktypesizes[chunktype] = 0
2263
2275
2264 chunktypecounts[chunktype] += 1
2276 chunktypecounts[chunktype] += 1
2265 chunktypesizes[chunktype] += size
2277 chunktypesizes[chunktype] += size
2266
2278
2267 # Adjust size min value for empty cases
2279 # Adjust size min value for empty cases
2268 for size in (datasize, fullsize, semisize, deltasize):
2280 for size in (datasize, fullsize, semisize, deltasize):
2269 if size[0] is None:
2281 if size[0] is None:
2270 size[0] = 0
2282 size[0] = 0
2271
2283
2272 numdeltas = numrevs - numfull - numempty - numsemi
2284 numdeltas = numrevs - numfull - numempty - numsemi
2273 numoprev = numprev - nump1prev - nump2prev
2285 numoprev = numprev - nump1prev - nump2prev
2274 totalrawsize = datasize[2]
2286 totalrawsize = datasize[2]
2275 datasize[2] /= numrevs
2287 datasize[2] /= numrevs
2276 fulltotal = fullsize[2]
2288 fulltotal = fullsize[2]
2277 fullsize[2] /= numfull
2289 fullsize[2] /= numfull
2278 semitotal = semisize[2]
2290 semitotal = semisize[2]
2279 snaptotal = {}
2291 snaptotal = {}
2280 if numsemi > 0:
2292 if numsemi > 0:
2281 semisize[2] /= numsemi
2293 semisize[2] /= numsemi
2282 for depth in snapsizedepth:
2294 for depth in snapsizedepth:
2283 snaptotal[depth] = snapsizedepth[depth][2]
2295 snaptotal[depth] = snapsizedepth[depth][2]
2284 snapsizedepth[depth][2] /= numsnapdepth[depth]
2296 snapsizedepth[depth][2] /= numsnapdepth[depth]
2285
2297
2286 deltatotal = deltasize[2]
2298 deltatotal = deltasize[2]
2287 if numdeltas > 0:
2299 if numdeltas > 0:
2288 deltasize[2] /= numdeltas
2300 deltasize[2] /= numdeltas
2289 totalsize = fulltotal + semitotal + deltatotal
2301 totalsize = fulltotal + semitotal + deltatotal
2290 avgchainlen = sum(chainlengths) / numrevs
2302 avgchainlen = sum(chainlengths) / numrevs
2291 maxchainlen = max(chainlengths)
2303 maxchainlen = max(chainlengths)
2292 maxchainspan = max(chainspans)
2304 maxchainspan = max(chainspans)
2293 compratio = 1
2305 compratio = 1
2294 if totalsize:
2306 if totalsize:
2295 compratio = totalrawsize / totalsize
2307 compratio = totalrawsize / totalsize
2296
2308
2297 basedfmtstr = '%%%dd\n'
2309 basedfmtstr = '%%%dd\n'
2298 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2310 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2299
2311
2300 def dfmtstr(max):
2312 def dfmtstr(max):
2301 return basedfmtstr % len(str(max))
2313 return basedfmtstr % len(str(max))
2302 def pcfmtstr(max, padding=0):
2314 def pcfmtstr(max, padding=0):
2303 return basepcfmtstr % (len(str(max)), ' ' * padding)
2315 return basepcfmtstr % (len(str(max)), ' ' * padding)
2304
2316
2305 def pcfmt(value, total):
2317 def pcfmt(value, total):
2306 if total:
2318 if total:
2307 return (value, 100 * float(value) / total)
2319 return (value, 100 * float(value) / total)
2308 else:
2320 else:
2309 return value, 100.0
2321 return value, 100.0
2310
2322
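# Editor's note (sketch): pcfmtstr()/pcfmt() build the aligned
# "value (percent%)" lines printed below. With totalsize == 12345,
# pcfmtstr(12345) evaluates to '%5d (%5.2f%%)\n', and pcfmt(41, 12345)
# to (41, 0.332...), so the rendered line reads roughly "   41 ( 0.33%)".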
2311 ui.write(('format : %d\n') % format)
2323 ui.write(('format : %d\n') % format)
2312 ui.write(('flags : %s\n') % ', '.join(flags))
2324 ui.write(('flags : %s\n') % ', '.join(flags))
2313
2325
2314 ui.write('\n')
2326 ui.write('\n')
2315 fmt = pcfmtstr(totalsize)
2327 fmt = pcfmtstr(totalsize)
2316 fmt2 = dfmtstr(totalsize)
2328 fmt2 = dfmtstr(totalsize)
2317 ui.write(('revisions : ') + fmt2 % numrevs)
2329 ui.write(('revisions : ') + fmt2 % numrevs)
2318 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2330 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2319 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2331 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2320 ui.write(('revisions : ') + fmt2 % numrevs)
2332 ui.write(('revisions : ') + fmt2 % numrevs)
2321 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2333 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2322 ui.write((' text : ')
2334 ui.write((' text : ')
2323 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2335 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2324 ui.write((' delta : ')
2336 ui.write((' delta : ')
2325 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2337 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2326 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2338 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2327 for depth in sorted(numsnapdepth):
2339 for depth in sorted(numsnapdepth):
2328 ui.write((' lvl-%-3d : ' % depth)
2340 ui.write((' lvl-%-3d : ' % depth)
2329 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2341 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2330 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2342 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2331 ui.write(('revision size : ') + fmt2 % totalsize)
2343 ui.write(('revision size : ') + fmt2 % totalsize)
2332 ui.write((' snapshot : ')
2344 ui.write((' snapshot : ')
2333 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2345 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2334 for depth in sorted(numsnapdepth):
2346 for depth in sorted(numsnapdepth):
2335 ui.write((' lvl-%-3d : ' % depth)
2347 ui.write((' lvl-%-3d : ' % depth)
2336 + fmt % pcfmt(snaptotal[depth], totalsize))
2348 + fmt % pcfmt(snaptotal[depth], totalsize))
2337 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2349 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2338
2350
2339 def fmtchunktype(chunktype):
2351 def fmtchunktype(chunktype):
2340 if chunktype == 'empty':
2352 if chunktype == 'empty':
2341 return ' %s : ' % chunktype
2353 return ' %s : ' % chunktype
2342 elif chunktype in pycompat.bytestr(string.ascii_letters):
2354 elif chunktype in pycompat.bytestr(string.ascii_letters):
2343 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2355 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2344 else:
2356 else:
2345 return ' 0x%s : ' % hex(chunktype)
2357 return ' 0x%s : ' % hex(chunktype)
2346
2358
2347 ui.write('\n')
2359 ui.write('\n')
2348 ui.write(('chunks : ') + fmt2 % numrevs)
2360 ui.write(('chunks : ') + fmt2 % numrevs)
2349 for chunktype in sorted(chunktypecounts):
2361 for chunktype in sorted(chunktypecounts):
2350 ui.write(fmtchunktype(chunktype))
2362 ui.write(fmtchunktype(chunktype))
2351 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2363 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2352 ui.write(('chunks size : ') + fmt2 % totalsize)
2364 ui.write(('chunks size : ') + fmt2 % totalsize)
2353 for chunktype in sorted(chunktypecounts):
2365 for chunktype in sorted(chunktypecounts):
2354 ui.write(fmtchunktype(chunktype))
2366 ui.write(fmtchunktype(chunktype))
2355 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2367 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2356
2368
2357 ui.write('\n')
2369 ui.write('\n')
2358 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2370 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2359 ui.write(('avg chain length : ') + fmt % avgchainlen)
2371 ui.write(('avg chain length : ') + fmt % avgchainlen)
2360 ui.write(('max chain length : ') + fmt % maxchainlen)
2372 ui.write(('max chain length : ') + fmt % maxchainlen)
2361 ui.write(('max chain reach : ') + fmt % maxchainspan)
2373 ui.write(('max chain reach : ') + fmt % maxchainspan)
2362 ui.write(('compression ratio : ') + fmt % compratio)
2374 ui.write(('compression ratio : ') + fmt % compratio)
2363
2375
2364 if format > 0:
2376 if format > 0:
2365 ui.write('\n')
2377 ui.write('\n')
2366 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2378 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2367 % tuple(datasize))
2379 % tuple(datasize))
2368 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2380 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2369 % tuple(fullsize))
2381 % tuple(fullsize))
2370 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2382 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2371 % tuple(semisize))
2383 % tuple(semisize))
2372 for depth in sorted(snapsizedepth):
2384 for depth in sorted(snapsizedepth):
2373 if depth == 0:
2385 if depth == 0:
2374 continue
2386 continue
2375 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2387 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2376 % ((depth,) + tuple(snapsizedepth[depth])))
2388 % ((depth,) + tuple(snapsizedepth[depth])))
2377 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2389 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2378 % tuple(deltasize))
2390 % tuple(deltasize))
2379
2391
2380 if numdeltas > 0:
2392 if numdeltas > 0:
2381 ui.write('\n')
2393 ui.write('\n')
2382 fmt = pcfmtstr(numdeltas)
2394 fmt = pcfmtstr(numdeltas)
2383 fmt2 = pcfmtstr(numdeltas, 4)
2395 fmt2 = pcfmtstr(numdeltas, 4)
2384 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2396 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2385 if numprev > 0:
2397 if numprev > 0:
2386 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2398 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2387 numprev))
2399 numprev))
2388 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2400 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2389 numprev))
2401 numprev))
2390 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2402 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2391 numprev))
2403 numprev))
2392 if gdelta:
2404 if gdelta:
2393 ui.write(('deltas against p1 : ')
2405 ui.write(('deltas against p1 : ')
2394 + fmt % pcfmt(nump1, numdeltas))
2406 + fmt % pcfmt(nump1, numdeltas))
2395 ui.write(('deltas against p2 : ')
2407 ui.write(('deltas against p2 : ')
2396 + fmt % pcfmt(nump2, numdeltas))
2408 + fmt % pcfmt(nump2, numdeltas))
2397 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2409 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2398 numdeltas))
2410 numdeltas))
2399
2411
2400 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2412 @command('debugrevlogindex', cmdutil.debugrevlogopts +
2401 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2413 [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
2402 _('[-f FORMAT] -c|-m|FILE'),
2414 _('[-f FORMAT] -c|-m|FILE'),
2403 optionalrepo=True)
2415 optionalrepo=True)
2404 def debugrevlogindex(ui, repo, file_=None, **opts):
2416 def debugrevlogindex(ui, repo, file_=None, **opts):
2405 """dump the contents of a revlog index"""
2417 """dump the contents of a revlog index"""
2406 opts = pycompat.byteskwargs(opts)
2418 opts = pycompat.byteskwargs(opts)
2407 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2419 r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
2408 format = opts.get('format', 0)
2420 format = opts.get('format', 0)
2409 if format not in (0, 1):
2421 if format not in (0, 1):
2410 raise error.Abort(_("unknown format %d") % format)
2422 raise error.Abort(_("unknown format %d") % format)
2411
2423
2412 if ui.debugflag:
2424 if ui.debugflag:
2413 shortfn = hex
2425 shortfn = hex
2414 else:
2426 else:
2415 shortfn = short
2427 shortfn = short
2416
2428
2417 # There might not be anything in r, so have a sane default
2429 # There might not be anything in r, so have a sane default
2418 idlen = 12
2430 idlen = 12
2419 for i in r:
2431 for i in r:
2420 idlen = len(shortfn(r.node(i)))
2432 idlen = len(shortfn(r.node(i)))
2421 break
2433 break
2422
2434
2423 if format == 0:
2435 if format == 0:
2424 if ui.verbose:
2436 if ui.verbose:
2425 ui.write((" rev offset length linkrev"
2437 ui.write((" rev offset length linkrev"
2426 " %s %s p2\n") % ("nodeid".ljust(idlen),
2438 " %s %s p2\n") % ("nodeid".ljust(idlen),
2427 "p1".ljust(idlen)))
2439 "p1".ljust(idlen)))
2428 else:
2440 else:
2429 ui.write((" rev linkrev %s %s p2\n") % (
2441 ui.write((" rev linkrev %s %s p2\n") % (
2430 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2442 "nodeid".ljust(idlen), "p1".ljust(idlen)))
2431 elif format == 1:
2443 elif format == 1:
2432 if ui.verbose:
2444 if ui.verbose:
2433 ui.write((" rev flag offset length size link p1"
2445 ui.write((" rev flag offset length size link p1"
2434 " p2 %s\n") % "nodeid".rjust(idlen))
2446 " p2 %s\n") % "nodeid".rjust(idlen))
2435 else:
2447 else:
2436 ui.write((" rev flag size link p1 p2 %s\n") %
2448 ui.write((" rev flag size link p1 p2 %s\n") %
2437 "nodeid".rjust(idlen))
2449 "nodeid".rjust(idlen))
2438
2450
2439 for i in r:
2451 for i in r:
2440 node = r.node(i)
2452 node = r.node(i)
2441 if format == 0:
2453 if format == 0:
2442 try:
2454 try:
2443 pp = r.parents(node)
2455 pp = r.parents(node)
2444 except Exception:
2456 except Exception:
2445 pp = [nullid, nullid]
2457 pp = [nullid, nullid]
2446 if ui.verbose:
2458 if ui.verbose:
2447 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2459 ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
2448 i, r.start(i), r.length(i), r.linkrev(i),
2460 i, r.start(i), r.length(i), r.linkrev(i),
2449 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2461 shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
2450 else:
2462 else:
2451 ui.write("% 6d % 7d %s %s %s\n" % (
2463 ui.write("% 6d % 7d %s %s %s\n" % (
2452 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2464 i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
2453 shortfn(pp[1])))
2465 shortfn(pp[1])))
2454 elif format == 1:
2466 elif format == 1:
2455 pr = r.parentrevs(i)
2467 pr = r.parentrevs(i)
2456 if ui.verbose:
2468 if ui.verbose:
2457 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2469 ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
2458 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2470 i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
2459 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2471 r.linkrev(i), pr[0], pr[1], shortfn(node)))
2460 else:
2472 else:
2461 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2473 ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
2462 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2474 i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
2463 shortfn(node)))
2475 shortfn(node)))
2464
2476
2465 @command('debugrevspec',
2477 @command('debugrevspec',
2466 [('', 'optimize', None,
2478 [('', 'optimize', None,
2467 _('print parsed tree after optimizing (DEPRECATED)')),
2479 _('print parsed tree after optimizing (DEPRECATED)')),
2468 ('', 'show-revs', True, _('print list of result revisions (default)')),
2480 ('', 'show-revs', True, _('print list of result revisions (default)')),
2469 ('s', 'show-set', None, _('print internal representation of result set')),
2481 ('s', 'show-set', None, _('print internal representation of result set')),
2470 ('p', 'show-stage', [],
2482 ('p', 'show-stage', [],
2471 _('print parsed tree at the given stage'), _('NAME')),
2483 _('print parsed tree at the given stage'), _('NAME')),
2472 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2484 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2473 ('', 'verify-optimized', False, _('verify optimized result')),
2485 ('', 'verify-optimized', False, _('verify optimized result')),
2474 ],
2486 ],
2475 ('REVSPEC'))
2487 ('REVSPEC'))
2476 def debugrevspec(ui, repo, expr, **opts):
2488 def debugrevspec(ui, repo, expr, **opts):
2477 """parse and apply a revision specification
2489 """parse and apply a revision specification
2478
2490
2479 Use -p/--show-stage option to print the parsed tree at the given stages.
2491 Use -p/--show-stage option to print the parsed tree at the given stages.
2480 Use -p all to print tree at every stage.
2492 Use -p all to print tree at every stage.
2481
2493
2482 Use --no-show-revs option with -s or -p to print only the set
2494 Use --no-show-revs option with -s or -p to print only the set
2483 representation or the parsed tree respectively.
2495 representation or the parsed tree respectively.
2484
2496
2485 Use --verify-optimized to compare the optimized result with the unoptimized
2497 Use --verify-optimized to compare the optimized result with the unoptimized
2486 one. Returns 1 if the optimized result differs.
2498 one. Returns 1 if the optimized result differs.
2487 """
2499 """
2488 opts = pycompat.byteskwargs(opts)
2500 opts = pycompat.byteskwargs(opts)
2489 aliases = ui.configitems('revsetalias')
2501 aliases = ui.configitems('revsetalias')
2490 stages = [
2502 stages = [
2491 ('parsed', lambda tree: tree),
2503 ('parsed', lambda tree: tree),
2492 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2504 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2493 ui.warn)),
2505 ui.warn)),
2494 ('concatenated', revsetlang.foldconcat),
2506 ('concatenated', revsetlang.foldconcat),
2495 ('analyzed', revsetlang.analyze),
2507 ('analyzed', revsetlang.analyze),
2496 ('optimized', revsetlang.optimize),
2508 ('optimized', revsetlang.optimize),
2497 ]
2509 ]
2498 if opts['no_optimized']:
2510 if opts['no_optimized']:
2499 stages = stages[:-1]
2511 stages = stages[:-1]
2500 if opts['verify_optimized'] and opts['no_optimized']:
2512 if opts['verify_optimized'] and opts['no_optimized']:
2501 raise error.Abort(_('cannot use --verify-optimized with '
2513 raise error.Abort(_('cannot use --verify-optimized with '
2502 '--no-optimized'))
2514 '--no-optimized'))
2503 stagenames = set(n for n, f in stages)
2515 stagenames = set(n for n, f in stages)
2504
2516
2505 showalways = set()
2517 showalways = set()
2506 showchanged = set()
2518 showchanged = set()
2507 if ui.verbose and not opts['show_stage']:
2519 if ui.verbose and not opts['show_stage']:
2508 # show parsed tree by --verbose (deprecated)
2520 # show parsed tree by --verbose (deprecated)
2509 showalways.add('parsed')
2521 showalways.add('parsed')
2510 showchanged.update(['expanded', 'concatenated'])
2522 showchanged.update(['expanded', 'concatenated'])
2511 if opts['optimize']:
2523 if opts['optimize']:
2512 showalways.add('optimized')
2524 showalways.add('optimized')
2513 if opts['show_stage'] and opts['optimize']:
2525 if opts['show_stage'] and opts['optimize']:
2514 raise error.Abort(_('cannot use --optimize with --show-stage'))
2526 raise error.Abort(_('cannot use --optimize with --show-stage'))
2515 if opts['show_stage'] == ['all']:
2527 if opts['show_stage'] == ['all']:
2516 showalways.update(stagenames)
2528 showalways.update(stagenames)
2517 else:
2529 else:
2518 for n in opts['show_stage']:
2530 for n in opts['show_stage']:
2519 if n not in stagenames:
2531 if n not in stagenames:
2520 raise error.Abort(_('invalid stage name: %s') % n)
2532 raise error.Abort(_('invalid stage name: %s') % n)
2521 showalways.update(opts['show_stage'])
2533 showalways.update(opts['show_stage'])
2522
2534
2523 treebystage = {}
2535 treebystage = {}
2524 printedtree = None
2536 printedtree = None
2525 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2537 tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
2526 for n, f in stages:
2538 for n, f in stages:
2527 treebystage[n] = tree = f(tree)
2539 treebystage[n] = tree = f(tree)
2528 if n in showalways or (n in showchanged and tree != printedtree):
2540 if n in showalways or (n in showchanged and tree != printedtree):
2529 if opts['show_stage'] or n != 'parsed':
2541 if opts['show_stage'] or n != 'parsed':
2530 ui.write(("* %s:\n") % n)
2542 ui.write(("* %s:\n") % n)
2531 ui.write(revsetlang.prettyformat(tree), "\n")
2543 ui.write(revsetlang.prettyformat(tree), "\n")
2532 printedtree = tree
2544 printedtree = tree
2533
2545
2534 if opts['verify_optimized']:
2546 if opts['verify_optimized']:
2535 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2547 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2536 brevs = revset.makematcher(treebystage['optimized'])(repo)
2548 brevs = revset.makematcher(treebystage['optimized'])(repo)
2537 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2549 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2538 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2550 ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
2539 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2551 ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
2540 arevs = list(arevs)
2552 arevs = list(arevs)
2541 brevs = list(brevs)
2553 brevs = list(brevs)
2542 if arevs == brevs:
2554 if arevs == brevs:
2543 return 0
2555 return 0
2544 ui.write(('--- analyzed\n'), label='diff.file_a')
2556 ui.write(('--- analyzed\n'), label='diff.file_a')
2545 ui.write(('+++ optimized\n'), label='diff.file_b')
2557 ui.write(('+++ optimized\n'), label='diff.file_b')
2546 sm = difflib.SequenceMatcher(None, arevs, brevs)
2558 sm = difflib.SequenceMatcher(None, arevs, brevs)
2547 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2559 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2548 if tag in (r'delete', r'replace'):
2560 if tag in (r'delete', r'replace'):
2549 for c in arevs[alo:ahi]:
2561 for c in arevs[alo:ahi]:
2550 ui.write('-%d\n' % c, label='diff.deleted')
2562 ui.write('-%d\n' % c, label='diff.deleted')
2551 if tag in (r'insert', r'replace'):
2563 if tag in (r'insert', r'replace'):
2552 for c in brevs[blo:bhi]:
2564 for c in brevs[blo:bhi]:
2553 ui.write('+%d\n' % c, label='diff.inserted')
2565 ui.write('+%d\n' % c, label='diff.inserted')
2554 if tag == r'equal':
2566 if tag == r'equal':
2555 for c in arevs[alo:ahi]:
2567 for c in arevs[alo:ahi]:
2556 ui.write(' %d\n' % c)
2568 ui.write(' %d\n' % c)
2557 return 1
2569 return 1
2558
2570
2559 func = revset.makematcher(tree)
2571 func = revset.makematcher(tree)
2560 revs = func(repo)
2572 revs = func(repo)
2561 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2573 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2562 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2574 ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
2563 if not opts['show_revs']:
2575 if not opts['show_revs']:
2564 return
2576 return
2565 for c in revs:
2577 for c in revs:
2566 ui.write("%d\n" % c)
2578 ui.write("%d\n" % c)
2567
2579
2568 @command('debugserve', [
2580 @command('debugserve', [
2569 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2581 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2570 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2582 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2571 ('', 'logiofile', '', _('file to log server I/O to')),
2583 ('', 'logiofile', '', _('file to log server I/O to')),
2572 ], '')
2584 ], '')
2573 def debugserve(ui, repo, **opts):
2585 def debugserve(ui, repo, **opts):
2574 """run a server with advanced settings
2586 """run a server with advanced settings
2575
2587
2576 This command is similar to :hg:`serve`. It exists partially as a
2588 This command is similar to :hg:`serve`. It exists partially as a
2577 workaround to the fact that ``hg serve --stdio`` must have specific
2589 workaround to the fact that ``hg serve --stdio`` must have specific
2578 arguments for security reasons.
2590 arguments for security reasons.
2579 """
2591 """
2580 opts = pycompat.byteskwargs(opts)
2592 opts = pycompat.byteskwargs(opts)
2581
2593
2582 if not opts['sshstdio']:
2594 if not opts['sshstdio']:
2583 raise error.Abort(_('only --sshstdio is currently supported'))
2595 raise error.Abort(_('only --sshstdio is currently supported'))
2584
2596
2585 logfh = None
2597 logfh = None
2586
2598
2587 if opts['logiofd'] and opts['logiofile']:
2599 if opts['logiofd'] and opts['logiofile']:
2588 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2600 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2589
2601
2590 if opts['logiofd']:
2602 if opts['logiofd']:
2591 # Line buffered because output is line based.
2603 # Line buffered because output is line based.
2592 try:
2604 try:
2593 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2605 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2594 except OSError as e:
2606 except OSError as e:
2595 if e.errno != errno.ESPIPE:
2607 if e.errno != errno.ESPIPE:
2596 raise
2608 raise
2597 # can't seek a pipe, so `ab` mode fails on py3
2609 # can't seek a pipe, so `ab` mode fails on py3
2598 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2610 logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
2599 elif opts['logiofile']:
2611 elif opts['logiofile']:
2600 logfh = open(opts['logiofile'], 'ab', 1)
2612 logfh = open(opts['logiofile'], 'ab', 1)
2601
2613
2602 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2614 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2603 s.serve_forever()
2615 s.serve_forever()
2604
2616
2605 @command('debugsetparents', [], _('REV1 [REV2]'))
2617 @command('debugsetparents', [], _('REV1 [REV2]'))
2606 def debugsetparents(ui, repo, rev1, rev2=None):
2618 def debugsetparents(ui, repo, rev1, rev2=None):
2607 """manually set the parents of the current working directory
2619 """manually set the parents of the current working directory
2608
2620
2609 This is useful for writing repository conversion tools, but should
2621 This is useful for writing repository conversion tools, but should
2610 be used with care. For example, neither the working directory nor the
2622 be used with care. For example, neither the working directory nor the
2611 dirstate is updated, so file status may be incorrect after running this
2623 dirstate is updated, so file status may be incorrect after running this
2612 command.
2624 command.
2613
2625
2614 Returns 0 on success.
2626 Returns 0 on success.
2615 """
2627 """
2616
2628
2617 node1 = scmutil.revsingle(repo, rev1).node()
2629 node1 = scmutil.revsingle(repo, rev1).node()
2618 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2630 node2 = scmutil.revsingle(repo, rev2, 'null').node()
2619
2631
2620 with repo.wlock():
2632 with repo.wlock():
2621 repo.setparents(node1, node2)
2633 repo.setparents(node1, node2)
2622
2634
2623 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2635 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2624 def debugssl(ui, repo, source=None, **opts):
2636 def debugssl(ui, repo, source=None, **opts):
2625 '''test a secure connection to a server
2637 '''test a secure connection to a server
2626
2638
2627 This builds the certificate chain for the server on Windows, installing the
2639 This builds the certificate chain for the server on Windows, installing the
2628 missing intermediates and trusted root via Windows Update if necessary. It
2640 missing intermediates and trusted root via Windows Update if necessary. It
2629 does nothing on other platforms.
2641 does nothing on other platforms.
2630
2642
2631 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2643 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2632 that server is used. See :hg:`help urls` for more information.
2644 that server is used. See :hg:`help urls` for more information.
2633
2645
2634 If the update succeeds, retry the original operation. Otherwise, the cause
2646 If the update succeeds, retry the original operation. Otherwise, the cause
2635 of the SSL error is likely another issue.
2647 of the SSL error is likely another issue.
2636 '''
2648 '''
2637 if not pycompat.iswindows:
2649 if not pycompat.iswindows:
2638 raise error.Abort(_('certificate chain building is only possible on '
2650 raise error.Abort(_('certificate chain building is only possible on '
2639 'Windows'))
2651 'Windows'))
2640
2652
2641 if not source:
2653 if not source:
2642 if not repo:
2654 if not repo:
2643 raise error.Abort(_("there is no Mercurial repository here, and no "
2655 raise error.Abort(_("there is no Mercurial repository here, and no "
2644 "server specified"))
2656 "server specified"))
2645 source = "default"
2657 source = "default"
2646
2658
2647 source, branches = hg.parseurl(ui.expandpath(source))
2659 source, branches = hg.parseurl(ui.expandpath(source))
2648 url = util.url(source)
2660 url = util.url(source)
2649
2661
2650 defaultport = {'https': 443, 'ssh': 22}
2662 defaultport = {'https': 443, 'ssh': 22}
2651 if url.scheme in defaultport:
2663 if url.scheme in defaultport:
2652 try:
2664 try:
2653 addr = (url.host, int(url.port or defaultport[url.scheme]))
2665 addr = (url.host, int(url.port or defaultport[url.scheme]))
2654 except ValueError:
2666 except ValueError:
2655 raise error.Abort(_("malformed port number in URL"))
2667 raise error.Abort(_("malformed port number in URL"))
2656 else:
2668 else:
2657 raise error.Abort(_("only https and ssh connections are supported"))
2669 raise error.Abort(_("only https and ssh connections are supported"))
2658
2670
2659 from . import win32
2671 from . import win32
2660
2672
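# Certificate verification is intentionally disabled here: the connection is
# made only to fetch the peer's certificate so its chain can be checked (and,
# if needed, completed) via the Windows certificate APIs below.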
2661 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2673 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2662 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2674 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2663
2675
2664 try:
2676 try:
2665 s.connect(addr)
2677 s.connect(addr)
2666 cert = s.getpeercert(True)
2678 cert = s.getpeercert(True)
2667
2679
2668 ui.status(_('checking the certificate chain for %s\n') % url.host)
2680 ui.status(_('checking the certificate chain for %s\n') % url.host)
2669
2681
2670 complete = win32.checkcertificatechain(cert, build=False)
2682 complete = win32.checkcertificatechain(cert, build=False)
2671
2683
2672 if not complete:
2684 if not complete:
2673 ui.status(_('certificate chain is incomplete, updating... '))
2685 ui.status(_('certificate chain is incomplete, updating... '))
2674
2686
2675 if not win32.checkcertificatechain(cert):
2687 if not win32.checkcertificatechain(cert):
2676 ui.status(_('failed.\n'))
2688 ui.status(_('failed.\n'))
2677 else:
2689 else:
2678 ui.status(_('done.\n'))
2690 ui.status(_('done.\n'))
2679 else:
2691 else:
2680 ui.status(_('full certificate chain is available\n'))
2692 ui.status(_('full certificate chain is available\n'))
2681 finally:
2693 finally:
2682 s.close()
2694 s.close()
2683
2695
2684 @command('debugsub',
2696 @command('debugsub',
2685 [('r', 'rev', '',
2697 [('r', 'rev', '',
2686 _('revision to check'), _('REV'))],
2698 _('revision to check'), _('REV'))],
2687 _('[-r REV] [REV]'))
2699 _('[-r REV] [REV]'))
2688 def debugsub(ui, repo, rev=None):
2700 def debugsub(ui, repo, rev=None):
2689 ctx = scmutil.revsingle(repo, rev, None)
2701 ctx = scmutil.revsingle(repo, rev, None)
2690 for k, v in sorted(ctx.substate.items()):
2702 for k, v in sorted(ctx.substate.items()):
2691 ui.write(('path %s\n') % k)
2703 ui.write(('path %s\n') % k)
2692 ui.write((' source %s\n') % v[0])
2704 ui.write((' source %s\n') % v[0])
2693 ui.write((' revision %s\n') % v[1])
2705 ui.write((' revision %s\n') % v[1])
2694
2706
2695 @command('debugsuccessorssets',
2707 @command('debugsuccessorssets',
2696 [('', 'closest', False, _('return closest successors sets only'))],
2708 [('', 'closest', False, _('return closest successors sets only'))],
2697 _('[REV]'))
2709 _('[REV]'))
2698 def debugsuccessorssets(ui, repo, *revs, **opts):
2710 def debugsuccessorssets(ui, repo, *revs, **opts):
2699 """show set of successors for revision
2711 """show set of successors for revision
2700
2712
2701 A successors set of changeset A is a consistent group of revisions that
2713 A successors set of changeset A is a consistent group of revisions that
2702 succeed A. It contains non-obsolete changesets only, unless closest
2714 succeed A. It contains non-obsolete changesets only, unless closest
2703 successors sets are requested (see ``--closest``).
2715 successors sets are requested (see ``--closest``).
2704
2716
2705 In most cases a changeset A has a single successors set containing a single
2717 In most cases a changeset A has a single successors set containing a single
2706 successor (changeset A replaced by A').
2718 successor (changeset A replaced by A').
2707
2719
2708 A changeset that is made obsolete with no successors is called "pruned".
2720 A changeset that is made obsolete with no successors is called "pruned".
2709 Such changesets have no successors sets at all.
2721 Such changesets have no successors sets at all.
2710
2722
2711 A changeset that has been "split" will have a successors set containing
2723 A changeset that has been "split" will have a successors set containing
2712 more than one successor.
2724 more than one successor.
2713
2725
2714 A changeset that has been rewritten in multiple different ways is called
2726 A changeset that has been rewritten in multiple different ways is called
2715 "divergent". Such changesets have multiple successor sets (each of which
2727 "divergent". Such changesets have multiple successor sets (each of which
2716 may also be split, i.e. have multiple successors).
2728 may also be split, i.e. have multiple successors).
2717
2729
2718 Results are displayed as follows::
2730 Results are displayed as follows::
2719
2731
2720 <rev1>
2732 <rev1>
2721 <successors-1A>
2733 <successors-1A>
2722 <rev2>
2734 <rev2>
2723 <successors-2A>
2735 <successors-2A>
2724 <successors-2B1> <successors-2B2> <successors-2B3>
2736 <successors-2B1> <successors-2B2> <successors-2B3>
2725
2737
2726 Here rev2 has two possible (i.e. divergent) successors sets. The first
2738 Here rev2 has two possible (i.e. divergent) successors sets. The first
2727 holds one element, whereas the second holds three (i.e. the changeset has
2739 holds one element, whereas the second holds three (i.e. the changeset has
2728 been split).
2740 been split).
2729 """
2741 """
2730 # passed to successorssets caching computation from one call to another
2742 # passed to successorssets caching computation from one call to another
2731 cache = {}
2743 cache = {}
2732 ctx2str = bytes
2744 ctx2str = bytes
2733 node2str = short
2745 node2str = short
2734 for rev in scmutil.revrange(repo, revs):
2746 for rev in scmutil.revrange(repo, revs):
2735 ctx = repo[rev]
2747 ctx = repo[rev]
2736 ui.write('%s\n'% ctx2str(ctx))
2748 ui.write('%s\n'% ctx2str(ctx))
2737 for succsset in obsutil.successorssets(repo, ctx.node(),
2749 for succsset in obsutil.successorssets(repo, ctx.node(),
2738 closest=opts[r'closest'],
2750 closest=opts[r'closest'],
2739 cache=cache):
2751 cache=cache):
2740 if succsset:
2752 if succsset:
2741 ui.write(' ')
2753 ui.write(' ')
2742 ui.write(node2str(succsset[0]))
2754 ui.write(node2str(succsset[0]))
2743 for node in succsset[1:]:
2755 for node in succsset[1:]:
2744 ui.write(' ')
2756 ui.write(' ')
2745 ui.write(node2str(node))
2757 ui.write(node2str(node))
2746 ui.write('\n')
2758 ui.write('\n')
2747
2759
2748 @command('debugtemplate',
2760 @command('debugtemplate',
2749 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2761 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2750 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2762 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2751 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2763 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2752 optionalrepo=True)
2764 optionalrepo=True)
2753 def debugtemplate(ui, repo, tmpl, **opts):
2765 def debugtemplate(ui, repo, tmpl, **opts):
2754 """parse and apply a template
2766 """parse and apply a template
2755
2767
2756 If -r/--rev is given, the template is processed as a log template and
2768 If -r/--rev is given, the template is processed as a log template and
2757 applied to the given changesets. Otherwise, it is processed as a generic
2769 applied to the given changesets. Otherwise, it is processed as a generic
2758 template.
2770 template.
2759
2771
2760 Use --verbose to print the parsed tree.
2772 Use --verbose to print the parsed tree.
2761 """
2773 """
2762 revs = None
2774 revs = None
2763 if opts[r'rev']:
2775 if opts[r'rev']:
2764 if repo is None:
2776 if repo is None:
2765 raise error.RepoError(_('there is no Mercurial repository here '
2777 raise error.RepoError(_('there is no Mercurial repository here '
2766 '(.hg not found)'))
2778 '(.hg not found)'))
2767 revs = scmutil.revrange(repo, opts[r'rev'])
2779 revs = scmutil.revrange(repo, opts[r'rev'])
2768
2780
2769 props = {}
2781 props = {}
2770 for d in opts[r'define']:
2782 for d in opts[r'define']:
2771 try:
2783 try:
2772 k, v = (e.strip() for e in d.split('=', 1))
2784 k, v = (e.strip() for e in d.split('=', 1))
2773 if not k or k == 'ui':
2785 if not k or k == 'ui':
2774 raise ValueError
2786 raise ValueError
2775 props[k] = v
2787 props[k] = v
2776 except ValueError:
2788 except ValueError:
2777 raise error.Abort(_('malformed keyword definition: %s') % d)
2789 raise error.Abort(_('malformed keyword definition: %s') % d)
2778
2790
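# With --verbose, show the parsed template tree and, if template aliases
# change it, the alias-expanded tree as well.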
2779 if ui.verbose:
2791 if ui.verbose:
2780 aliases = ui.configitems('templatealias')
2792 aliases = ui.configitems('templatealias')
2781 tree = templater.parse(tmpl)
2793 tree = templater.parse(tmpl)
2782 ui.note(templater.prettyformat(tree), '\n')
2794 ui.note(templater.prettyformat(tree), '\n')
2783 newtree = templater.expandaliases(tree, aliases)
2795 newtree = templater.expandaliases(tree, aliases)
2784 if newtree != tree:
2796 if newtree != tree:
2785 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2797 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2786
2798
2787 if revs is None:
2799 if revs is None:
2788 tres = formatter.templateresources(ui, repo)
2800 tres = formatter.templateresources(ui, repo)
2789 t = formatter.maketemplater(ui, tmpl, resources=tres)
2801 t = formatter.maketemplater(ui, tmpl, resources=tres)
2790 if ui.verbose:
2802 if ui.verbose:
2791 kwds, funcs = t.symbolsuseddefault()
2803 kwds, funcs = t.symbolsuseddefault()
2792 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2804 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2793 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2805 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2794 ui.write(t.renderdefault(props))
2806 ui.write(t.renderdefault(props))
2795 else:
2807 else:
2796 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2808 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2797 if ui.verbose:
2809 if ui.verbose:
2798 kwds, funcs = displayer.t.symbolsuseddefault()
2810 kwds, funcs = displayer.t.symbolsuseddefault()
2799 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2811 ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
2800 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2812 ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))
2801 for r in revs:
2813 for r in revs:
2802 displayer.show(repo[r], **pycompat.strkwargs(props))
2814 displayer.show(repo[r], **pycompat.strkwargs(props))
2803 displayer.close()
2815 displayer.close()
2804
2816
2805 @command('debuguigetpass', [
2817 @command('debuguigetpass', [
2806 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2818 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2807 ], _('[-p TEXT]'), norepo=True)
2819 ], _('[-p TEXT]'), norepo=True)
2808 def debuguigetpass(ui, prompt=''):
2820 def debuguigetpass(ui, prompt=''):
2809 """show prompt to type password"""
2821 """show prompt to type password"""
2810 r = ui.getpass(prompt)
2822 r = ui.getpass(prompt)
2811 ui.write(('response: %s\n') % r)
2823 ui.write(('response: %s\n') % r)
2812
2824
2813 @command('debuguiprompt', [
2825 @command('debuguiprompt', [
2814 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2826 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2815 ], _('[-p TEXT]'), norepo=True)
2827 ], _('[-p TEXT]'), norepo=True)
2816 def debuguiprompt(ui, prompt=''):
2828 def debuguiprompt(ui, prompt=''):
2817 """show plain prompt"""
2829 """show plain prompt"""
2818 r = ui.prompt(prompt)
2830 r = ui.prompt(prompt)
2819 ui.write(('response: %s\n') % r)
2831 ui.write(('response: %s\n') % r)
2820
2832
2821 @command('debugupdatecaches', [])
2833 @command('debugupdatecaches', [])
2822 def debugupdatecaches(ui, repo, *pats, **opts):
2834 def debugupdatecaches(ui, repo, *pats, **opts):
2823 """warm all known caches in the repository"""
2835 """warm all known caches in the repository"""
2824 with repo.wlock(), repo.lock():
2836 with repo.wlock(), repo.lock():
2825 repo.updatecaches(full=True)
2837 repo.updatecaches(full=True)
2826
2838
2827 @command('debugupgraderepo', [
2839 @command('debugupgraderepo', [
2828 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2840 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2829 ('', 'run', False, _('performs an upgrade')),
2841 ('', 'run', False, _('performs an upgrade')),
2830 ('', 'backup', True, _('keep the old repository content around')),
2842 ('', 'backup', True, _('keep the old repository content around')),
2831 ])
2843 ])
2832 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2844 def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
2833 """upgrade a repository to use different features
2845 """upgrade a repository to use different features
2834
2846
2835 If no arguments are specified, the repository is evaluated for upgrade
2847 If no arguments are specified, the repository is evaluated for upgrade
2836 and a list of problems and potential optimizations is printed.
2848 and a list of problems and potential optimizations is printed.
2837
2849
2838 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2850 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2839 can be influenced via additional arguments. More details will be provided
2851 can be influenced via additional arguments. More details will be provided
2840 by the command output when run without ``--run``.
2852 by the command output when run without ``--run``.
2841
2853
2842 During the upgrade, the repository will be locked and no writes will be
2854 During the upgrade, the repository will be locked and no writes will be
2843 allowed.
2855 allowed.
2844
2856
2845 At the end of the upgrade, the repository may not be readable while new
2857 At the end of the upgrade, the repository may not be readable while new
2846 repository data is swapped in. This window will be as long as it takes to
2858 repository data is swapped in. This window will be as long as it takes to
2847 rename some directories inside the ``.hg`` directory. On most machines, this
2859 rename some directories inside the ``.hg`` directory. On most machines, this
2848 should complete almost instantaneously and the chances of a consumer being
2860 should complete almost instantaneously and the chances of a consumer being
2849 unable to access the repository should be low.
2861 unable to access the repository should be low.
2850 """
2862 """
2851 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2863 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
2852 backup=backup)
2864 backup=backup)
2853
2865
2854 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2866 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2855 inferrepo=True)
2867 inferrepo=True)
2856 def debugwalk(ui, repo, *pats, **opts):
2868 def debugwalk(ui, repo, *pats, **opts):
2857 """show how files match on given patterns"""
2869 """show how files match on given patterns"""
2858 opts = pycompat.byteskwargs(opts)
2870 opts = pycompat.byteskwargs(opts)
2859 m = scmutil.match(repo[None], pats, opts)
2871 m = scmutil.match(repo[None], pats, opts)
2860 if ui.verbose:
2872 if ui.verbose:
2861 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2873 ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
2862 items = list(repo[None].walk(m))
2874 items = list(repo[None].walk(m))
2863 if not items:
2875 if not items:
2864 return
2876 return
2865 f = lambda fn: fn
2877 f = lambda fn: fn
2866 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2878 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2867 f = lambda fn: util.normpath(fn)
2879 f = lambda fn: util.normpath(fn)
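# Build a column-aligned format string.  The doubled %% survives this first
# interpolation, so with widths of e.g. 10 and 20 the result is the template
# 'f %-10s %-20s %s' used for each matched file below.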
2868 fmt = 'f %%-%ds %%-%ds %%s' % (
2880 fmt = 'f %%-%ds %%-%ds %%s' % (
2869 max([len(abs) for abs in items]),
2881 max([len(abs) for abs in items]),
2870 max([len(repo.pathto(abs)) for abs in items]))
2882 max([len(repo.pathto(abs)) for abs in items]))
2871 for abs in items:
2883 for abs in items:
2872 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2884 line = fmt % (abs, f(repo.pathto(abs)), m.exact(abs) and 'exact' or '')
2873 ui.write("%s\n" % line.rstrip())
2885 ui.write("%s\n" % line.rstrip())
2874
2886
2875 @command('debugwhyunstable', [], _('REV'))
2887 @command('debugwhyunstable', [], _('REV'))
2876 def debugwhyunstable(ui, repo, rev):
2888 def debugwhyunstable(ui, repo, rev):
2877 """explain instabilities of a changeset"""
2889 """explain instabilities of a changeset"""
2878 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2890 for entry in obsutil.whyunstable(repo, scmutil.revsingle(repo, rev)):
2879 dnodes = ''
2891 dnodes = ''
2880 if entry.get('divergentnodes'):
2892 if entry.get('divergentnodes'):
2881 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2893 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2882 for ctx in entry['divergentnodes']) + ' '
2894 for ctx in entry['divergentnodes']) + ' '
2883 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2895 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2884 entry['reason'], entry['node']))
2896 entry['reason'], entry['node']))
2885
2897
2886 @command('debugwireargs',
2898 @command('debugwireargs',
2887 [('', 'three', '', 'three'),
2899 [('', 'three', '', 'three'),
2888 ('', 'four', '', 'four'),
2900 ('', 'four', '', 'four'),
2889 ('', 'five', '', 'five'),
2901 ('', 'five', '', 'five'),
2890 ] + cmdutil.remoteopts,
2902 ] + cmdutil.remoteopts,
2891 _('REPO [OPTIONS]... [ONE [TWO]]'),
2903 _('REPO [OPTIONS]... [ONE [TWO]]'),
2892 norepo=True)
2904 norepo=True)
2893 def debugwireargs(ui, repopath, *vals, **opts):
2905 def debugwireargs(ui, repopath, *vals, **opts):
2894 opts = pycompat.byteskwargs(opts)
2906 opts = pycompat.byteskwargs(opts)
2895 repo = hg.peer(ui, opts, repopath)
2907 repo = hg.peer(ui, opts, repopath)
2896 for opt in cmdutil.remoteopts:
2908 for opt in cmdutil.remoteopts:
2897 del opts[opt[1]]
2909 del opts[opt[1]]
2898 args = {}
2910 args = {}
2899 for k, v in opts.iteritems():
2911 for k, v in opts.iteritems():
2900 if v:
2912 if v:
2901 args[k] = v
2913 args[k] = v
2902 args = pycompat.strkwargs(args)
2914 args = pycompat.strkwargs(args)
2903 # run twice to check that we don't mess up the stream for the next command
2915 # run twice to check that we don't mess up the stream for the next command
2904 res1 = repo.debugwireargs(*vals, **args)
2916 res1 = repo.debugwireargs(*vals, **args)
2905 res2 = repo.debugwireargs(*vals, **args)
2917 res2 = repo.debugwireargs(*vals, **args)
2906 ui.write("%s\n" % res1)
2918 ui.write("%s\n" % res1)
2907 if res1 != res2:
2919 if res1 != res2:
2908 ui.warn("%s\n" % res2)
2920 ui.warn("%s\n" % res2)
2909
2921
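# Parse the mini language accepted by `hg debugwireproto` (see that command's
# docstring) into (action, lines) blocks.  As a rough, hypothetical sketch of
# the behaviour, input such as
#
#   command listkeys
#       namespace bookmarks
#
# is yielded as (b'command listkeys', [b'    namespace bookmarks']); the
# indented lines are stripped and split later by the caller.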
2910 def _parsewirelangblocks(fh):
2922 def _parsewirelangblocks(fh):
2911 activeaction = None
2923 activeaction = None
2912 blocklines = []
2924 blocklines = []
2913 lastindent = 0
2925 lastindent = 0
2914
2926
2915 for line in fh:
2927 for line in fh:
2916 line = line.rstrip()
2928 line = line.rstrip()
2917 if not line:
2929 if not line:
2918 continue
2930 continue
2919
2931
2920 if line.startswith(b'#'):
2932 if line.startswith(b'#'):
2921 continue
2933 continue
2922
2934
2923 if not line.startswith(b' '):
2935 if not line.startswith(b' '):
2924 # New block. Flush previous one.
2936 # New block. Flush previous one.
2925 if activeaction:
2937 if activeaction:
2926 yield activeaction, blocklines
2938 yield activeaction, blocklines
2927
2939
2928 activeaction = line
2940 activeaction = line
2929 blocklines = []
2941 blocklines = []
2930 lastindent = 0
2942 lastindent = 0
2931 continue
2943 continue
2932
2944
2933 # Else we start with an indent.
2945 # Else we start with an indent.
2934
2946
2935 if not activeaction:
2947 if not activeaction:
2936 raise error.Abort(_('indented line outside of block'))
2948 raise error.Abort(_('indented line outside of block'))
2937
2949
2938 indent = len(line) - len(line.lstrip())
2950 indent = len(line) - len(line.lstrip())
2939
2951
2940 # If this line is indented more than the last line, concatenate it.
2952 # If this line is indented more than the last line, concatenate it.
2941 if indent > lastindent and blocklines:
2953 if indent > lastindent and blocklines:
2942 blocklines[-1] += line.lstrip()
2954 blocklines[-1] += line.lstrip()
2943 else:
2955 else:
2944 blocklines.append(line)
2956 blocklines.append(line)
2945 lastindent = indent
2957 lastindent = indent
2946
2958
2947 # Flush last block.
2959 # Flush last block.
2948 if activeaction:
2960 if activeaction:
2949 yield activeaction, blocklines
2961 yield activeaction, blocklines
2950
2962
2951 @command('debugwireproto',
2963 @command('debugwireproto',
2952 [
2964 [
2953 ('', 'localssh', False, _('start an SSH server for this repo')),
2965 ('', 'localssh', False, _('start an SSH server for this repo')),
2954 ('', 'peer', '', _('construct a specific version of the peer')),
2966 ('', 'peer', '', _('construct a specific version of the peer')),
2955 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2967 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2956 ('', 'nologhandshake', False,
2968 ('', 'nologhandshake', False,
2957 _('do not log I/O related to the peer handshake')),
2969 _('do not log I/O related to the peer handshake')),
2958 ] + cmdutil.remoteopts,
2970 ] + cmdutil.remoteopts,
2959 _('[PATH]'),
2971 _('[PATH]'),
2960 optionalrepo=True)
2972 optionalrepo=True)
2961 def debugwireproto(ui, repo, path=None, **opts):
2973 def debugwireproto(ui, repo, path=None, **opts):
2962 """send wire protocol commands to a server
2974 """send wire protocol commands to a server
2963
2975
2964 This command can be used to issue wire protocol commands to remote
2976 This command can be used to issue wire protocol commands to remote
2965 peers and to debug the raw data being exchanged.
2977 peers and to debug the raw data being exchanged.
2966
2978
2967 ``--localssh`` will start an SSH server against the current repository
2979 ``--localssh`` will start an SSH server against the current repository
2968 and connect to that. By default, the connection will perform a handshake
2980 and connect to that. By default, the connection will perform a handshake
2969 and establish an appropriate peer instance.
2981 and establish an appropriate peer instance.
2970
2982
2971 ``--peer`` can be used to bypass the handshake protocol and construct a
2983 ``--peer`` can be used to bypass the handshake protocol and construct a
2972 peer instance using the specified class type. Valid values are ``raw``,
2984 peer instance using the specified class type. Valid values are ``raw``,
2973 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2985 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2974 raw data payloads and don't support higher-level command actions.
2986 raw data payloads and don't support higher-level command actions.
2975
2987
2976 ``--noreadstderr`` can be used to disable automatic reading from stderr
2988 ``--noreadstderr`` can be used to disable automatic reading from stderr
2977 of the peer (for SSH connections only). Disabling automatic reading of
2989 of the peer (for SSH connections only). Disabling automatic reading of
2978 stderr is useful for making output more deterministic.
2990 stderr is useful for making output more deterministic.
2979
2991
2980 Commands are issued via a mini language which is specified via stdin.
2992 Commands are issued via a mini language which is specified via stdin.
2981 The language consists of individual actions to perform. An action is
2993 The language consists of individual actions to perform. An action is
2982 defined by a block. A block is defined as a line with no leading
2994 defined by a block. A block is defined as a line with no leading
2983 space followed by 0 or more lines with leading space. Blocks are
2995 space followed by 0 or more lines with leading space. Blocks are
2984 effectively a high-level command with additional metadata.
2996 effectively a high-level command with additional metadata.
2985
2997
2986 Lines beginning with ``#`` are ignored.
2998 Lines beginning with ``#`` are ignored.
2987
2999
2988 The following sections denote available actions.
3000 The following sections denote available actions.
2989
3001
2990 raw
3002 raw
2991 ---
3003 ---
2992
3004
2993 Send raw data to the server.
3005 Send raw data to the server.
2994
3006
2995 The block payload contains the raw data to send as one atomic send
3007 The block payload contains the raw data to send as one atomic send
2996 operation. The data may not actually be delivered in a single system
3008 operation. The data may not actually be delivered in a single system
2997 call: it depends on the abilities of the transport being used.
3009 call: it depends on the abilities of the transport being used.
2998
3010
2999 Each line in the block is de-indented and concatenated. Then, that
3011 Each line in the block is de-indented and concatenated. Then, that
3000 value is evaluated as a Python b'' literal. This allows the use of
3012 value is evaluated as a Python b'' literal. This allows the use of
3001 backslash escaping, etc.
3013 backslash escaping, etc.
3002
3014
3003 raw+
3015 raw+
3004 ----
3016 ----
3005
3017
3006 Behaves like ``raw`` except that it flushes output afterwards.
3018 Behaves like ``raw`` except that it flushes output afterwards.
3007
3019
3008 command <X>
3020 command <X>
3009 -----------
3021 -----------
3010
3022
3011 Send a request to run a named command, whose name follows the ``command``
3023 Send a request to run a named command, whose name follows the ``command``
3012 string.
3024 string.
3013
3025
3014 Arguments to the command are defined as lines in this block. The format of
3026 Arguments to the command are defined as lines in this block. The format of
3015 each line is ``<key> <value>``. e.g.::
3027 each line is ``<key> <value>``. e.g.::
3016
3028
3017 command listkeys
3029 command listkeys
3018 namespace bookmarks
3030 namespace bookmarks
3019
3031
3020 If the value begins with ``eval:``, it will be interpreted as a Python
3032 If the value begins with ``eval:``, it will be interpreted as a Python
3021 literal expression. Otherwise values are interpreted as Python b'' literals.
3033 literal expression. Otherwise values are interpreted as Python b'' literals.
3022 This allows sending complex types and encoding special byte sequences via
3034 This allows sending complex types and encoding special byte sequences via
3023 backslash escaping.
3035 backslash escaping.
3024
3036
3025 The following arguments have special meaning:
3037 The following arguments have special meaning:
3026
3038
3027 ``PUSHFILE``
3039 ``PUSHFILE``
3028 When defined, the *push* mechanism of the peer will be used instead
3040 When defined, the *push* mechanism of the peer will be used instead
3029 of the static request-response mechanism and the content of the
3041 of the static request-response mechanism and the content of the
3030 file specified in the value of this argument will be sent as the
3042 file specified in the value of this argument will be sent as the
3031 command payload.
3043 command payload.
3032
3044
3033 This can be used to submit a local bundle file to the remote.
3045 This can be used to submit a local bundle file to the remote.
3034
3046
3035 batchbegin
3047 batchbegin
3036 ----------
3048 ----------
3037
3049
3038 Instruct the peer to begin a batched send.
3050 Instruct the peer to begin a batched send.
3039
3051
3040 All ``command`` blocks are queued for execution until the next
3052 All ``command`` blocks are queued for execution until the next
3041 ``batchsubmit`` block.
3053 ``batchsubmit`` block.
3042
3054
3043 batchsubmit
3055 batchsubmit
3044 -----------
3056 -----------
3045
3057
3046 Submit previously queued ``command`` blocks as a batch request.
3058 Submit previously queued ``command`` blocks as a batch request.
3047
3059
3048 This action MUST be paired with a ``batchbegin`` action.
3060 This action MUST be paired with a ``batchbegin`` action.
3049
3061
3050 httprequest <method> <path>
3062 httprequest <method> <path>
3051 ---------------------------
3063 ---------------------------
3052
3064
3053 (HTTP peer only)
3065 (HTTP peer only)
3054
3066
3055 Send an HTTP request to the peer.
3067 Send an HTTP request to the peer.
3056
3068
3057 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3069 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3058
3070
3059 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3071 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3060 headers to add to the request. e.g. ``Accept: foo``.
3072 headers to add to the request. e.g. ``Accept: foo``.
3061
3073
3062 The following arguments are special:
3074 The following arguments are special:
3063
3075
3064 ``BODYFILE``
3076 ``BODYFILE``
3065 The content of the file defined as the value to this argument will be
3077 The content of the file defined as the value to this argument will be
3066 transferred verbatim as the HTTP request body.
3078 transferred verbatim as the HTTP request body.
3067
3079
3068 ``frame <type> <flags> <payload>``
3080 ``frame <type> <flags> <payload>``
3069 Send a unified protocol frame as part of the request body.
3081 Send a unified protocol frame as part of the request body.
3070
3082
3071 All frames will be collected and sent as the body to the HTTP
3083 All frames will be collected and sent as the body to the HTTP
3072 request.
3084 request.
3073
3085
3074 close
3086 close
3075 -----
3087 -----
3076
3088
3077 Close the connection to the server.
3089 Close the connection to the server.
3078
3090
3079 flush
3091 flush
3080 -----
3092 -----
3081
3093
3082 Flush data written to the server.
3094 Flush data written to the server.
3083
3095
3084 readavailable
3096 readavailable
3085 -------------
3097 -------------
3086
3098
3087 Close the write end of the connection and read all available data from
3099 Close the write end of the connection and read all available data from
3088 the server.
3100 the server.
3089
3101
3090 If the connection to the server encompasses multiple pipes, we poll both
3102 If the connection to the server encompasses multiple pipes, we poll both
3091 pipes and read available data.
3103 pipes and read available data.
3092
3104
3093 readline
3105 readline
3094 --------
3106 --------
3095
3107
3096 Read a line of output from the server. If there are multiple output
3108 Read a line of output from the server. If there are multiple output
3097 pipes, reads only the main pipe.
3109 pipes, reads only the main pipe.
3098
3110
3099 ereadline
3111 ereadline
3100 ---------
3112 ---------
3101
3113
3102 Like ``readline``, but read from the stderr pipe, if available.
3114 Like ``readline``, but read from the stderr pipe, if available.
3103
3115
3104 read <X>
3116 read <X>
3105 --------
3117 --------
3106
3118
3107 ``read()`` N bytes from the server's main output pipe.
3119 ``read()`` N bytes from the server's main output pipe.
3108
3120
3109 eread <X>
3121 eread <X>
3110 ---------
3122 ---------
3111
3123
3112 ``read()`` N bytes from the server's stderr pipe, if available.
3124 ``read()`` N bytes from the server's stderr pipe, if available.
3113
3125
3114 Specifying Unified Frame-Based Protocol Frames
3126 Specifying Unified Frame-Based Protocol Frames
3115 ----------------------------------------------
3127 ----------------------------------------------
3116
3128
3117 It is possible to emit *Unified Frame-Based Protocol* frames by using special
3129 It is possible to emit *Unified Frame-Based Protocol* frames by using special
3118 syntax.
3130 syntax.
3119
3131
3120 A frame is composed of a type, flags, and a payload. These can be parsed
3132 A frame is composed of a type, flags, and a payload. These can be parsed
3121 from a string of the form:
3133 from a string of the form:
3122
3134
3123 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3135 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3124
3136
3125 ``request-id`` and ``stream-id`` are integers defining the request and
3137 ``request-id`` and ``stream-id`` are integers defining the request and
3126 stream identifiers.
3138 stream identifiers.
3127
3139
3128 ``type`` can be an integer value for the frame type or the string name
3140 ``type`` can be an integer value for the frame type or the string name
3129 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3141 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3130 ``command-name``.
3142 ``command-name``.
3131
3143
3132 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3144 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3133 components. Each component (and there can be just one) can be an integer
3145 components. Each component (and there can be just one) can be an integer
3134 or a flag name for stream flags or frame flags, respectively. Values are
3146 or a flag name for stream flags or frame flags, respectively. Values are
3135 resolved to integers and then bitwise OR'd together.
3147 resolved to integers and then bitwise OR'd together.
3136
3148
3137 ``payload`` represents the raw frame payload. If it begins with
3149 ``payload`` represents the raw frame payload. If it begins with
3138 ``cbor:``, the following string is evaluated as Python code and the
3150 ``cbor:``, the following string is evaluated as Python code and the
3139 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3151 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3140 as a Python byte string literal.
3152 as a Python byte string literal.
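For example, a frame string along the lines of
``1 1 stream-begin command-request new cbor:{b'name': b'heads'}`` would
describe a ``command-request`` frame with the ``new`` flag, sent on a
stream opened with ``stream-begin`` and carrying a CBOR-encoded request
for the ``heads`` command.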
3141 """
3153 """
3142 opts = pycompat.byteskwargs(opts)
3154 opts = pycompat.byteskwargs(opts)
3143
3155
3144 if opts['localssh'] and not repo:
3156 if opts['localssh'] and not repo:
3145 raise error.Abort(_('--localssh requires a repository'))
3157 raise error.Abort(_('--localssh requires a repository'))
3146
3158
3147 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3159 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3148 raise error.Abort(_('invalid value for --peer'),
3160 raise error.Abort(_('invalid value for --peer'),
3149 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3161 hint=_('valid values are "raw", "http2", "ssh1", and "ssh2"'))
3150
3162
3151 if path and opts['localssh']:
3163 if path and opts['localssh']:
3152 raise error.Abort(_('cannot specify --localssh with an explicit '
3164 raise error.Abort(_('cannot specify --localssh with an explicit '
3153 'path'))
3165 'path'))
3154
3166
3155 if ui.interactive():
3167 if ui.interactive():
3156 ui.write(_('(waiting for commands on stdin)\n'))
3168 ui.write(_('(waiting for commands on stdin)\n'))
3157
3169
3158 blocks = list(_parsewirelangblocks(ui.fin))
3170 blocks = list(_parsewirelangblocks(ui.fin))
3159
3171
3160 proc = None
3172 proc = None
3161 stdin = None
3173 stdin = None
3162 stdout = None
3174 stdout = None
3163 stderr = None
3175 stderr = None
3164 opener = None
3176 opener = None
3165
3177
3166 if opts['localssh']:
3178 if opts['localssh']:
3167 # We start the SSH server in its own process so there is process
3179 # We start the SSH server in its own process so there is process
3168 # separation. This prevents a whole class of potential bugs around
3180 # separation. This prevents a whole class of potential bugs around
3169 # shared state from interfering with server operation.
3181 # shared state from interfering with server operation.
3170 args = procutil.hgcmd() + [
3182 args = procutil.hgcmd() + [
3171 '-R', repo.root,
3183 '-R', repo.root,
3172 'debugserve', '--sshstdio',
3184 'debugserve', '--sshstdio',
3173 ]
3185 ]
3174 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3186 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3175 stdin=subprocess.PIPE,
3187 stdin=subprocess.PIPE,
3176 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3188 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3177 bufsize=0)
3189 bufsize=0)
3178
3190
3179 stdin = proc.stdin
3191 stdin = proc.stdin
3180 stdout = proc.stdout
3192 stdout = proc.stdout
3181 stderr = proc.stderr
3193 stderr = proc.stderr
3182
3194
3183 # We turn the pipes into observers so we can log I/O.
3195 # We turn the pipes into observers so we can log I/O.
3184 if ui.verbose or opts['peer'] == 'raw':
3196 if ui.verbose or opts['peer'] == 'raw':
3185 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3197 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3186 logdata=True)
3198 logdata=True)
3187 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3199 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3188 logdata=True)
3200 logdata=True)
3189 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3201 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3190 logdata=True)
3202 logdata=True)
3191
3203
3192 # --localssh also implies the peer connection settings.
3204 # --localssh also implies the peer connection settings.
3193
3205
3194 url = 'ssh://localserver'
3206 url = 'ssh://localserver'
3195 autoreadstderr = not opts['noreadstderr']
3207 autoreadstderr = not opts['noreadstderr']
3196
3208
3197 if opts['peer'] == 'ssh1':
3209 if opts['peer'] == 'ssh1':
3198 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3210 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3199 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3211 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3200 None, autoreadstderr=autoreadstderr)
3212 None, autoreadstderr=autoreadstderr)
3201 elif opts['peer'] == 'ssh2':
3213 elif opts['peer'] == 'ssh2':
3202 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3214 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3203 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3215 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3204 None, autoreadstderr=autoreadstderr)
3216 None, autoreadstderr=autoreadstderr)
3205 elif opts['peer'] == 'raw':
3217 elif opts['peer'] == 'raw':
3206 ui.write(_('using raw connection to peer\n'))
3218 ui.write(_('using raw connection to peer\n'))
3207 peer = None
3219 peer = None
3208 else:
3220 else:
3209 ui.write(_('creating ssh peer from handshake results\n'))
3221 ui.write(_('creating ssh peer from handshake results\n'))
3210 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3222 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3211 autoreadstderr=autoreadstderr)
3223 autoreadstderr=autoreadstderr)
3212
3224
3213 elif path:
3225 elif path:
3214 # We bypass hg.peer() so we can proxy the sockets.
3226 # We bypass hg.peer() so we can proxy the sockets.
3215 # TODO consider not doing this because we skip
3227 # TODO consider not doing this because we skip
3216 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3228 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3217 u = util.url(path)
3229 u = util.url(path)
3218 if u.scheme != 'http':
3230 if u.scheme != 'http':
3219 raise error.Abort(_('only http:// paths are currently supported'))
3231 raise error.Abort(_('only http:// paths are currently supported'))
3220
3232
3221 url, authinfo = u.authinfo()
3233 url, authinfo = u.authinfo()
3222 openerargs = {
3234 openerargs = {
3223 r'useragent': b'Mercurial debugwireproto',
3235 r'useragent': b'Mercurial debugwireproto',
3224 }
3236 }
3225
3237
3226 # Turn pipes/sockets into observers so we can log I/O.
3238 # Turn pipes/sockets into observers so we can log I/O.
3227 if ui.verbose:
3239 if ui.verbose:
3228 openerargs.update({
3240 openerargs.update({
3229 r'loggingfh': ui,
3241 r'loggingfh': ui,
3230 r'loggingname': b's',
3242 r'loggingname': b's',
3231 r'loggingopts': {
3243 r'loggingopts': {
3232 r'logdata': True,
3244 r'logdata': True,
3233 r'logdataapis': False,
3245 r'logdataapis': False,
3234 },
3246 },
3235 })
3247 })
3236
3248
3237 if ui.debugflag:
3249 if ui.debugflag:
3238 openerargs[r'loggingopts'][r'logdataapis'] = True
3250 openerargs[r'loggingopts'][r'logdataapis'] = True
3239
3251
3240 # Don't send default headers when in raw mode. This allows us to
3252 # Don't send default headers when in raw mode. This allows us to
3241 # bypass most of the behavior of our URL handling code so we can
3253 # bypass most of the behavior of our URL handling code so we can
3242 # have near complete control over what's sent on the wire.
3254 # have near complete control over what's sent on the wire.
3243 if opts['peer'] == 'raw':
3255 if opts['peer'] == 'raw':
3244 openerargs[r'sendaccept'] = False
3256 openerargs[r'sendaccept'] = False
3245
3257
3246 opener = urlmod.opener(ui, authinfo, **openerargs)
3258 opener = urlmod.opener(ui, authinfo, **openerargs)
3247
3259
3248 if opts['peer'] == 'http2':
3260 if opts['peer'] == 'http2':
3249 ui.write(_('creating http peer for wire protocol version 2\n'))
3261 ui.write(_('creating http peer for wire protocol version 2\n'))
3250 # We go through makepeer() because we need an API descriptor for
3262 # We go through makepeer() because we need an API descriptor for
3251 # the peer instance to be useful.
3263 # the peer instance to be useful.
3252 with ui.configoverride({
3264 with ui.configoverride({
3253 ('experimental', 'httppeer.advertise-v2'): True}):
3265 ('experimental', 'httppeer.advertise-v2'): True}):
3254 if opts['nologhandshake']:
3266 if opts['nologhandshake']:
3255 ui.pushbuffer()
3267 ui.pushbuffer()
3256
3268
3257 peer = httppeer.makepeer(ui, path, opener=opener)
3269 peer = httppeer.makepeer(ui, path, opener=opener)
3258
3270
3259 if opts['nologhandshake']:
3271 if opts['nologhandshake']:
3260 ui.popbuffer()
3272 ui.popbuffer()
3261
3273
3262 if not isinstance(peer, httppeer.httpv2peer):
3274 if not isinstance(peer, httppeer.httpv2peer):
3263 raise error.Abort(_('could not instantiate HTTP peer for '
3275 raise error.Abort(_('could not instantiate HTTP peer for '
3264 'wire protocol version 2'),
3276 'wire protocol version 2'),
3265 hint=_('the server may not have the feature '
3277 hint=_('the server may not have the feature '
3266 'enabled or is not allowing this '
3278 'enabled or is not allowing this '
3267 'client version'))
3279 'client version'))
3268
3280
3269 elif opts['peer'] == 'raw':
3281 elif opts['peer'] == 'raw':
3270 ui.write(_('using raw connection to peer\n'))
3282 ui.write(_('using raw connection to peer\n'))
3271 peer = None
3283 peer = None
3272 elif opts['peer']:
3284 elif opts['peer']:
3273 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3285 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3274 opts['peer'])
3286 opts['peer'])
3275 else:
3287 else:
3276 peer = httppeer.makepeer(ui, path, opener=opener)
3288 peer = httppeer.makepeer(ui, path, opener=opener)
3277
3289
3278 # We /could/ populate stdin/stdout with sock.makefile()...
3290 # We /could/ populate stdin/stdout with sock.makefile()...
3279 else:
3291 else:
3280 raise error.Abort(_('unsupported connection configuration'))
3292 raise error.Abort(_('unsupported connection configuration'))
3281
3293
3282 batchedcommands = None
3294 batchedcommands = None
3283
3295
3284 # Now perform actions based on the parsed wire language instructions.
3296 # Now perform actions based on the parsed wire language instructions.
3285 for action, lines in blocks:
3297 for action, lines in blocks:
3286 if action in ('raw', 'raw+'):
3298 if action in ('raw', 'raw+'):
3287 if not stdin:
3299 if not stdin:
3288 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3300 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3289
3301
3290 # Concatenate the data together.
3302 # Concatenate the data together.
3291 data = ''.join(l.lstrip() for l in lines)
3303 data = ''.join(l.lstrip() for l in lines)
3292 data = stringutil.unescapestr(data)
3304 data = stringutil.unescapestr(data)
3293 stdin.write(data)
3305 stdin.write(data)
3294
3306
3295 if action == 'raw+':
3307 if action == 'raw+':
3296 stdin.flush()
3308 stdin.flush()
3297 elif action == 'flush':
3309 elif action == 'flush':
3298 if not stdin:
3310 if not stdin:
3299 raise error.Abort(_('cannot call flush on this peer'))
3311 raise error.Abort(_('cannot call flush on this peer'))
3300 stdin.flush()
3312 stdin.flush()
3301 elif action.startswith('command'):
3313 elif action.startswith('command'):
3302 if not peer:
3314 if not peer:
3303 raise error.Abort(_('cannot send commands unless peer instance '
3315 raise error.Abort(_('cannot send commands unless peer instance '
3304 'is available'))
3316 'is available'))
3305
3317
3306 command = action.split(' ', 1)[1]
3318 command = action.split(' ', 1)[1]
3307
3319
3308 args = {}
3320 args = {}
3309 for line in lines:
3321 for line in lines:
3310 # We need to allow empty values.
3322 # We need to allow empty values.
3311 fields = line.lstrip().split(' ', 1)
3323 fields = line.lstrip().split(' ', 1)
3312 if len(fields) == 1:
3324 if len(fields) == 1:
3313 key = fields[0]
3325 key = fields[0]
3314 value = ''
3326 value = ''
3315 else:
3327 else:
3316 key, value = fields
3328 key, value = fields
3317
3329
3318 if value.startswith('eval:'):
3330 if value.startswith('eval:'):
3319 value = stringutil.evalpythonliteral(value[5:])
3331 value = stringutil.evalpythonliteral(value[5:])
3320 else:
3332 else:
3321 value = stringutil.unescapestr(value)
3333 value = stringutil.unescapestr(value)
3322
3334
3323 args[key] = value
3335 args[key] = value
3324
3336
3325 if batchedcommands is not None:
3337 if batchedcommands is not None:
3326 batchedcommands.append((command, args))
3338 batchedcommands.append((command, args))
3327 continue
3339 continue
3328
3340
3329 ui.status(_('sending %s command\n') % command)
3341 ui.status(_('sending %s command\n') % command)
3330
3342
3331 if 'PUSHFILE' in args:
3343 if 'PUSHFILE' in args:
3332 with open(args['PUSHFILE'], r'rb') as fh:
3344 with open(args['PUSHFILE'], r'rb') as fh:
3333 del args['PUSHFILE']
3345 del args['PUSHFILE']
3334 res, output = peer._callpush(command, fh,
3346 res, output = peer._callpush(command, fh,
3335 **pycompat.strkwargs(args))
3347 **pycompat.strkwargs(args))
3336 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3348 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3337 ui.status(_('remote output: %s\n') %
3349 ui.status(_('remote output: %s\n') %
3338 stringutil.escapestr(output))
3350 stringutil.escapestr(output))
3339 else:
3351 else:
3340 with peer.commandexecutor() as e:
3352 with peer.commandexecutor() as e:
3341 res = e.callcommand(command, args).result()
3353 res = e.callcommand(command, args).result()
3342
3354
3343 if isinstance(res, wireprotov2peer.commandresponse):
3355 if isinstance(res, wireprotov2peer.commandresponse):
3344 val = res.objects()
3356 val = res.objects()
3345 ui.status(_('response: %s\n') %
3357 ui.status(_('response: %s\n') %
3346 stringutil.pprint(val, bprefix=True, indent=2))
3358 stringutil.pprint(val, bprefix=True, indent=2))
3347 else:
3359 else:
3348 ui.status(_('response: %s\n') %
3360 ui.status(_('response: %s\n') %
3349 stringutil.pprint(res, bprefix=True, indent=2))
3361 stringutil.pprint(res, bprefix=True, indent=2))
3350
3362
3351 elif action == 'batchbegin':
3363 elif action == 'batchbegin':
3352 if batchedcommands is not None:
3364 if batchedcommands is not None:
3353 raise error.Abort(_('nested batchbegin not allowed'))
3365 raise error.Abort(_('nested batchbegin not allowed'))
3354
3366
3355 batchedcommands = []
3367 batchedcommands = []
3356 elif action == 'batchsubmit':
3368 elif action == 'batchsubmit':
3357 # There is a batching API we could go through. But it would be
3369 # There is a batching API we could go through. But it would be
3358 # difficult to normalize requests into function calls. It is easier
3370 # difficult to normalize requests into function calls. It is easier
3359 # to bypass this layer and normalize to commands + args.
3371 # to bypass this layer and normalize to commands + args.
3360 ui.status(_('sending batch with %d sub-commands\n') %
3372 ui.status(_('sending batch with %d sub-commands\n') %
3361 len(batchedcommands))
3373 len(batchedcommands))
3362 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3374 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3363 ui.status(_('response #%d: %s\n') %
3375 ui.status(_('response #%d: %s\n') %
3364 (i, stringutil.escapestr(chunk)))
3376 (i, stringutil.escapestr(chunk)))
3365
3377
3366 batchedcommands = None
3378 batchedcommands = None
3367
3379
3368 elif action.startswith('httprequest '):
3380 elif action.startswith('httprequest '):
3369 if not opener:
3381 if not opener:
3370 raise error.Abort(_('cannot use httprequest without an HTTP '
3382 raise error.Abort(_('cannot use httprequest without an HTTP '
3371 'peer'))
3383 'peer'))
3372
3384
3373 request = action.split(' ', 2)
3385 request = action.split(' ', 2)
3374 if len(request) != 3:
3386 if len(request) != 3:
3375 raise error.Abort(_('invalid httprequest: expected format is '
3387 raise error.Abort(_('invalid httprequest: expected format is '
3376 '"httprequest <method> <path>'))
3388 '"httprequest <method> <path>'))
3377
3389
3378 method, httppath = request[1:]
3390 method, httppath = request[1:]
3379 headers = {}
3391 headers = {}
3380 body = None
3392 body = None
3381 frames = []
3393 frames = []
3382 for line in lines:
3394 for line in lines:
3383 line = line.lstrip()
3395 line = line.lstrip()
3384 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3396 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3385 if m:
3397 if m:
3386 # Headers need to use native strings.
3398 # Headers need to use native strings.
3387 key = pycompat.strurl(m.group(1))
3399 key = pycompat.strurl(m.group(1))
3388 value = pycompat.strurl(m.group(2))
3400 value = pycompat.strurl(m.group(2))
3389 headers[key] = value
3401 headers[key] = value
3390 continue
3402 continue
3391
3403
3392 if line.startswith(b'BODYFILE '):
3404 if line.startswith(b'BODYFILE '):
3393 with open(line.split(b' ', 1)[1], 'rb') as fh:
3405 with open(line.split(b' ', 1)[1], 'rb') as fh:
3394 body = fh.read()
3406 body = fh.read()
3395 elif line.startswith(b'frame '):
3407 elif line.startswith(b'frame '):
3396 frame = wireprotoframing.makeframefromhumanstring(
3408 frame = wireprotoframing.makeframefromhumanstring(
3397 line[len(b'frame '):])
3409 line[len(b'frame '):])
3398
3410
3399 frames.append(frame)
3411 frames.append(frame)
3400 else:
3412 else:
3401 raise error.Abort(_('unknown argument to httprequest: %s') %
3413 raise error.Abort(_('unknown argument to httprequest: %s') %
3402 line)
3414 line)
3403
3415
3404 url = path + httppath
3416 url = path + httppath
3405
3417
3406 if frames:
3418 if frames:
3407 body = b''.join(bytes(f) for f in frames)
3419 body = b''.join(bytes(f) for f in frames)
3408
3420
3409 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3421 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3410
3422
3411 # urllib.Request insists on using has_data() as a proxy for
3423 # urllib.Request insists on using has_data() as a proxy for
3412 # determining the request method. Override that to use our
3424 # determining the request method. Override that to use our
3413 # explicitly requested method.
3425 # explicitly requested method.
3414 req.get_method = lambda: pycompat.sysstr(method)
3426 req.get_method = lambda: pycompat.sysstr(method)
3415
3427
3416 try:
3428 try:
3417 res = opener.open(req)
3429 res = opener.open(req)
3418 body = res.read()
3430 body = res.read()
3419 except util.urlerr.urlerror as e:
3431 except util.urlerr.urlerror as e:
3420 # read() method must be called, but only exists in Python 2
3432 # read() method must be called, but only exists in Python 2
3421 getattr(e, 'read', lambda: None)()
3433 getattr(e, 'read', lambda: None)()
3422 continue
3434 continue
3423
3435
3424 ct = res.headers.get(r'Content-Type')
3436 ct = res.headers.get(r'Content-Type')
3425 if ct == r'application/mercurial-cbor':
3437 if ct == r'application/mercurial-cbor':
3426 ui.write(_('cbor> %s\n') %
3438 ui.write(_('cbor> %s\n') %
3427 stringutil.pprint(cborutil.decodeall(body),
3439 stringutil.pprint(cborutil.decodeall(body),
3428 bprefix=True,
3440 bprefix=True,
3429 indent=2))
3441 indent=2))
3430
3442
3431 elif action == 'close':
3443 elif action == 'close':
3432 peer.close()
3444 peer.close()
3433 elif action == 'readavailable':
3445 elif action == 'readavailable':
3434 if not stdout or not stderr:
3446 if not stdout or not stderr:
3435 raise error.Abort(_('readavailable not available on this peer'))
3447 raise error.Abort(_('readavailable not available on this peer'))
3436
3448
3437 stdin.close()
3449 stdin.close()
3438 stdout.read()
3450 stdout.read()
3439 stderr.read()
3451 stderr.read()
3440
3452
3441 elif action == 'readline':
3453 elif action == 'readline':
3442 if not stdout:
3454 if not stdout:
3443 raise error.Abort(_('readline not available on this peer'))
3455 raise error.Abort(_('readline not available on this peer'))
3444 stdout.readline()
3456 stdout.readline()
3445 elif action == 'ereadline':
3457 elif action == 'ereadline':
3446 if not stderr:
3458 if not stderr:
3447 raise error.Abort(_('ereadline not available on this peer'))
3459 raise error.Abort(_('ereadline not available on this peer'))
3448 stderr.readline()
3460 stderr.readline()
3449 elif action.startswith('read '):
3461 elif action.startswith('read '):
3450 count = int(action.split(' ', 1)[1])
3462 count = int(action.split(' ', 1)[1])
3451 if not stdout:
3463 if not stdout:
3452 raise error.Abort(_('read not available on this peer'))
3464 raise error.Abort(_('read not available on this peer'))
3453 stdout.read(count)
3465 stdout.read(count)
3454 elif action.startswith('eread '):
3466 elif action.startswith('eread '):
3455 count = int(action.split(' ', 1)[1])
3467 count = int(action.split(' ', 1)[1])
3456 if not stderr:
3468 if not stderr:
3457 raise error.Abort(_('eread not available on this peer'))
3469 raise error.Abort(_('eread not available on this peer'))
3458 stderr.read(count)
3470 stderr.read(count)
3459 else:
3471 else:
3460 raise error.Abort(_('unknown action: %s') % action)
3472 raise error.Abort(_('unknown action: %s') % action)
3461
3473
3462 if batchedcommands is not None:
3474 if batchedcommands is not None:
3463 raise error.Abort(_('unclosed "batchbegin" request'))
3475 raise error.Abort(_('unclosed "batchbegin" request'))
3464
3476
3465 if peer:
3477 if peer:
3466 peer.close()
3478 peer.close()
3467
3479
3468 if proc:
3480 if proc:
3469 proc.kill()
3481 proc.kill()
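Editorial note (not part of the changeset): the dispatch loop above consumes a small action script of the kind fed to `hg debugwireproto`. A minimal sketch of such a script, with command names and arguments chosen purely for illustration, could look like:

    batchbegin
    command heads
    command listkeys
        namespace namespaces
    batchsubmit
    httprequest GET ?cmd=capabilities
        user-agent: test
    close

Each unindented line selects one of the actions handled above; indented lines supply arguments for a command or, for `httprequest`, header lines matching the `key: value` pattern parsed in the loop.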
@@ -1,109 +1,146 b''
1 # policy.py - module policy logic for Mercurial.
1 # policy.py - module policy logic for Mercurial.
2 #
2 #
3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2015 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11 import sys
11 import sys
12
12
13 # Rules for how modules can be loaded. Values are:
13 # Rules for how modules can be loaded. Values are:
14 #
14 #
15 # c - require C extensions
15 # c - require C extensions
16 # rust+c - require Rust and C extensions
17 # rust+c-allow - allow Rust and C extensions with fallback to pure Python
18 # for each
16 # allow - allow pure Python implementation when C loading fails
19 # allow - allow pure Python implementation when C loading fails
17 # cffi - required cffi versions (implemented within pure module)
20 # cffi - required cffi versions (implemented within pure module)
18 # cffi-allow - allow pure Python implementation if cffi version is missing
21 # cffi-allow - allow pure Python implementation if cffi version is missing
19 # py - only load pure Python modules
22 # py - only load pure Python modules
20 #
23 #
21 # By default, fall back to the pure modules so the in-place build can
24 # By default, fall back to the pure modules so the in-place build can
22 # run without recompiling the C extensions. This will be overridden by
25 # run without recompiling the C extensions. This will be overridden by
23 # __modulepolicy__ generated by setup.py.
26 # __modulepolicy__ generated by setup.py.
24 policy = b'allow'
27 policy = b'allow'
25 _packageprefs = {
28 _packageprefs = {
26 # policy: (versioned package, pure package)
29 # policy: (versioned package, pure package)
27 b'c': (r'cext', None),
30 b'c': (r'cext', None),
28 b'allow': (r'cext', r'pure'),
31 b'allow': (r'cext', r'pure'),
29 b'cffi': (r'cffi', None),
32 b'cffi': (r'cffi', None),
30 b'cffi-allow': (r'cffi', r'pure'),
33 b'cffi-allow': (r'cffi', r'pure'),
31 b'py': (None, r'pure'),
34 b'py': (None, r'pure'),
35 # For now, rust policies impact importrust only
36 b'rust+c': (r'cext', None),
37 b'rust+c-allow': (r'cext', r'pure'),
32 }
38 }
33
39
34 try:
40 try:
35 from . import __modulepolicy__
41 from . import __modulepolicy__
36 policy = __modulepolicy__.modulepolicy
42 policy = __modulepolicy__.modulepolicy
37 except ImportError:
43 except ImportError:
38 pass
44 pass
39
45
40 # PyPy doesn't load C extensions.
46 # PyPy doesn't load C extensions.
41 #
47 #
42 # The canonical way to do this is to test platform.python_implementation().
48 # The canonical way to do this is to test platform.python_implementation().
43 # But we avoid importing platform here just for that check.
49 # But we avoid importing platform here just for that check.
44 if r'__pypy__' in sys.builtin_module_names:
50 if r'__pypy__' in sys.builtin_module_names:
45 policy = b'cffi'
51 policy = b'cffi'
46
52
47 # Environment variable can always force settings.
53 # Environment variable can always force settings.
48 if sys.version_info[0] >= 3:
54 if sys.version_info[0] >= 3:
49 if r'HGMODULEPOLICY' in os.environ:
55 if r'HGMODULEPOLICY' in os.environ:
50 policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8')
56 policy = os.environ[r'HGMODULEPOLICY'].encode(r'utf-8')
51 else:
57 else:
52 policy = os.environ.get(r'HGMODULEPOLICY', policy)
58 policy = os.environ.get(r'HGMODULEPOLICY', policy)
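A hedged illustration (editorial, not part of the file): because the environment override above is applied when this module is imported, the new Rust policies can be forced at run time just like the existing ones. In a fresh interpreter, something along these lines would hold:

import os
# Permissive variant: fall back to cext/pure if rustext is unavailable.
os.environ['HGMODULEPOLICY'] = 'rust+c-allow'

from mercurial import policy
# policy.policy now reflects the override (bytes on Python 3, str on Python 2),
# so importrust() below will try rustext first instead of returning its default.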
53
59
54 def _importfrom(pkgname, modname):
60 def _importfrom(pkgname, modname):
55 # from .<pkgname> import <modname> (where . is looked through this module)
61 # from .<pkgname> import <modname> (where . is looked through this module)
56 fakelocals = {}
62 fakelocals = {}
57 pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1)
63 pkg = __import__(pkgname, globals(), fakelocals, [modname], level=1)
58 try:
64 try:
59 fakelocals[modname] = mod = getattr(pkg, modname)
65 fakelocals[modname] = mod = getattr(pkg, modname)
60 except AttributeError:
66 except AttributeError:
61 raise ImportError(r'cannot import name %s' % modname)
67 raise ImportError(r'cannot import name %s' % modname)
62 # force import; fakelocals[modname] may be replaced with the real module
68 # force import; fakelocals[modname] may be replaced with the real module
63 getattr(mod, r'__doc__', None)
69 getattr(mod, r'__doc__', None)
64 return fakelocals[modname]
70 return fakelocals[modname]
65
71
66 # keep in sync with "version" in C modules
72 # keep in sync with "version" in C modules
67 _cextversions = {
73 _cextversions = {
68 (r'cext', r'base85'): 1,
74 (r'cext', r'base85'): 1,
69 (r'cext', r'bdiff'): 3,
75 (r'cext', r'bdiff'): 3,
70 (r'cext', r'mpatch'): 1,
76 (r'cext', r'mpatch'): 1,
71 (r'cext', r'osutil'): 4,
77 (r'cext', r'osutil'): 4,
72 (r'cext', r'parsers'): 13,
78 (r'cext', r'parsers'): 13,
73 }
79 }
74
80
75 # map import request to other package or module
81 # map import request to other package or module
76 _modredirects = {
82 _modredirects = {
77 (r'cext', r'charencode'): (r'cext', r'parsers'),
83 (r'cext', r'charencode'): (r'cext', r'parsers'),
78 (r'cffi', r'base85'): (r'pure', r'base85'),
84 (r'cffi', r'base85'): (r'pure', r'base85'),
79 (r'cffi', r'charencode'): (r'pure', r'charencode'),
85 (r'cffi', r'charencode'): (r'pure', r'charencode'),
80 (r'cffi', r'parsers'): (r'pure', r'parsers'),
86 (r'cffi', r'parsers'): (r'pure', r'parsers'),
81 }
87 }
82
88
83 def _checkmod(pkgname, modname, mod):
89 def _checkmod(pkgname, modname, mod):
84 expected = _cextversions.get((pkgname, modname))
90 expected = _cextversions.get((pkgname, modname))
85 actual = getattr(mod, r'version', None)
91 actual = getattr(mod, r'version', None)
86 if actual != expected:
92 if actual != expected:
87 raise ImportError(r'cannot import module %s.%s '
93 raise ImportError(r'cannot import module %s.%s '
88 r'(expected version: %d, actual: %r)'
94 r'(expected version: %d, actual: %r)'
89 % (pkgname, modname, expected, actual))
95 % (pkgname, modname, expected, actual))
90
96
91 def importmod(modname):
97 def importmod(modname):
92 """Import module according to policy and check API version"""
98 """Import module according to policy and check API version"""
93 try:
99 try:
94 verpkg, purepkg = _packageprefs[policy]
100 verpkg, purepkg = _packageprefs[policy]
95 except KeyError:
101 except KeyError:
96 raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
102 raise ImportError(r'invalid HGMODULEPOLICY %r' % policy)
97 assert verpkg or purepkg
103 assert verpkg or purepkg
98 if verpkg:
104 if verpkg:
99 pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname))
105 pn, mn = _modredirects.get((verpkg, modname), (verpkg, modname))
100 try:
106 try:
101 mod = _importfrom(pn, mn)
107 mod = _importfrom(pn, mn)
102 if pn == verpkg:
108 if pn == verpkg:
103 _checkmod(pn, mn, mod)
109 _checkmod(pn, mn, mod)
104 return mod
110 return mod
105 except ImportError:
111 except ImportError:
106 if not purepkg:
112 if not purepkg:
107 raise
113 raise
108 pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
114 pn, mn = _modredirects.get((purepkg, modname), (purepkg, modname))
109 return _importfrom(pn, mn)
115 return _importfrom(pn, mn)
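A brief usage sketch (editorial, not from the patch): callers elsewhere in Mercurial resolve their extension-backed modules through this helper rather than importing cext/pure directly, roughly like so:

from mercurial import policy

# Resolves to cext.parsers, pure.parsers, etc. depending on the active policy,
# raising ImportError if the required implementation cannot be loaded.
parsers = policy.importmod(r'parsers')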
116
117 def _isrustpermissive():
118 """Assuming the policy is a Rust one, tell if it's permissive."""
119 return policy.endswith(b'-allow')
120
121 def importrust(modname, member=None, default=None):
122 """Import Rust module according to policy and availability.
123
124 If policy isn't a Rust one, this returns `default`.
125
126 If either the module or its member is not available, this returns `default`
127 if policy is permissive and raises `ImportError` if not.
128 """
129 if not policy.startswith(b'rust'):
130 return default
131
132 try:
133 mod = _importfrom(r'rustext', modname)
134 except ImportError:
135 if _isrustpermissive():
136 return default
137 raise
138 if member is None:
139 return mod
140
141 try:
142 return getattr(mod, member)
143 except AttributeError:
144 if _isrustpermissive():
145 return default
146 raise ImportError(r"Cannot import name %s" % member)
@@ -1,1477 +1,1484 b''
1 #
1 #
2 # This is the mercurial setup script.
2 # This is the mercurial setup script.
3 #
3 #
4 # 'python setup.py install', or
4 # 'python setup.py install', or
5 # 'python setup.py --help' for more options
5 # 'python setup.py --help' for more options
6
6
7 import os
7 import os
8
8
9 supportedpy = '~= 2.7'
9 supportedpy = '~= 2.7'
10 if os.environ.get('HGALLOWPYTHON3', ''):
10 if os.environ.get('HGALLOWPYTHON3', ''):
11 # Mercurial will never work on Python 3 before 3.5 due to a lack
11 # Mercurial will never work on Python 3 before 3.5 due to a lack
12 # of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1
12 # of % formatting on bytestrings, and can't work on 3.6.0 or 3.6.1
13 # due to a bug in % formatting in bytestrings.
13 # due to a bug in % formatting in bytestrings.
14 # We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of a bug in
14 # We cannot support Python 3.5.0, 3.5.1, 3.5.2 because of a bug in
15 # codecs.escape_encode() where it raises SystemError on empty bytestring
15 # codecs.escape_encode() where it raises SystemError on empty bytestring
16 # bug link: https://bugs.python.org/issue25270
16 # bug link: https://bugs.python.org/issue25270
17 #
17 #
18 # TODO: when we actually work on Python 3, use this string as the
18 # TODO: when we actually work on Python 3, use this string as the
19 # actual supportedpy string.
19 # actual supportedpy string.
20 supportedpy = ','.join([
20 supportedpy = ','.join([
21 '>=2.7',
21 '>=2.7',
22 '!=3.0.*',
22 '!=3.0.*',
23 '!=3.1.*',
23 '!=3.1.*',
24 '!=3.2.*',
24 '!=3.2.*',
25 '!=3.3.*',
25 '!=3.3.*',
26 '!=3.4.*',
26 '!=3.4.*',
27 '!=3.5.0',
27 '!=3.5.0',
28 '!=3.5.1',
28 '!=3.5.1',
29 '!=3.5.2',
29 '!=3.5.2',
30 '!=3.6.0',
30 '!=3.6.0',
31 '!=3.6.1',
31 '!=3.6.1',
32 ])
32 ])
33
33
34 import sys, platform
34 import sys, platform
35 if sys.version_info[0] >= 3:
35 if sys.version_info[0] >= 3:
36 printf = eval('print')
36 printf = eval('print')
37 libdir_escape = 'unicode_escape'
37 libdir_escape = 'unicode_escape'
38 def sysstr(s):
38 def sysstr(s):
39 return s.decode('latin-1')
39 return s.decode('latin-1')
40 else:
40 else:
41 libdir_escape = 'string_escape'
41 libdir_escape = 'string_escape'
42 def printf(*args, **kwargs):
42 def printf(*args, **kwargs):
43 f = kwargs.get('file', sys.stdout)
43 f = kwargs.get('file', sys.stdout)
44 end = kwargs.get('end', '\n')
44 end = kwargs.get('end', '\n')
45 f.write(b' '.join(args) + end)
45 f.write(b' '.join(args) + end)
46 def sysstr(s):
46 def sysstr(s):
47 return s
47 return s
48
48
49 # Attempt to guide users to a modern pip - this means that 2.6 users
49 # Attempt to guide users to a modern pip - this means that 2.6 users
50 # should have a chance of getting a 4.2 release, and when we ratchet
50 # should have a chance of getting a 4.2 release, and when we ratchet
51 # the version requirement forward again hopefully everyone will get
51 # the version requirement forward again hopefully everyone will get
52 # something that works for them.
52 # something that works for them.
53 if sys.version_info < (2, 7, 0, 'final'):
53 if sys.version_info < (2, 7, 0, 'final'):
54 pip_message = ('This may be due to an out of date pip. '
54 pip_message = ('This may be due to an out of date pip. '
55 'Make sure you have pip >= 9.0.1.')
55 'Make sure you have pip >= 9.0.1.')
56 try:
56 try:
57 import pip
57 import pip
58 pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]])
58 pip_version = tuple([int(x) for x in pip.__version__.split('.')[:3]])
59 if pip_version < (9, 0, 1) :
59 if pip_version < (9, 0, 1) :
60 pip_message = (
60 pip_message = (
61 'Your pip version is out of date, please install '
61 'Your pip version is out of date, please install '
62 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__))
62 'pip >= 9.0.1. pip {} detected.'.format(pip.__version__))
63 else:
63 else:
64 # pip is new enough - it must be something else
64 # pip is new enough - it must be something else
65 pip_message = ''
65 pip_message = ''
66 except Exception:
66 except Exception:
67 pass
67 pass
68 error = """
68 error = """
69 Mercurial does not support Python older than 2.7.
69 Mercurial does not support Python older than 2.7.
70 Python {py} detected.
70 Python {py} detected.
71 {pip}
71 {pip}
72 """.format(py=sys.version_info, pip=pip_message)
72 """.format(py=sys.version_info, pip=pip_message)
73 printf(error, file=sys.stderr)
73 printf(error, file=sys.stderr)
74 sys.exit(1)
74 sys.exit(1)
75
75
76 # We don't yet officially support Python 3. But we want to allow developers to
76 # We don't yet officially support Python 3. But we want to allow developers to
77 # hack on it. Detect and disallow running on Python 3 by default. But provide a
77 # hack on it. Detect and disallow running on Python 3 by default. But provide a
78 # backdoor to enable working on Python 3.
78 # backdoor to enable working on Python 3.
79 if sys.version_info[0] != 2:
79 if sys.version_info[0] != 2:
80 badpython = True
80 badpython = True
81
81
82 # Allow Python 3 from source checkouts.
82 # Allow Python 3 from source checkouts.
83 if os.path.isdir('.hg') or 'HGPYTHON3' in os.environ:
83 if os.path.isdir('.hg') or 'HGPYTHON3' in os.environ:
84 badpython = False
84 badpython = False
85
85
86 if badpython:
86 if badpython:
87 error = """
87 error = """
88 Python {py} detected.
88 Python {py} detected.
89
89
90 Mercurial currently has beta support for Python 3 and use of Python 2.7 is
90 Mercurial currently has beta support for Python 3 and use of Python 2.7 is
91 recommended for the best experience.
91 recommended for the best experience.
92
92
93 Please re-run with Python 2.7 for a faster, less buggy experience.
93 Please re-run with Python 2.7 for a faster, less buggy experience.
94
94
95 If you would like to beta test Mercurial with Python 3, this error can
95 If you would like to beta test Mercurial with Python 3, this error can
96 be suppressed by defining the HGPYTHON3 environment variable when invoking
96 be suppressed by defining the HGPYTHON3 environment variable when invoking
97 this command. No special environment variables or configuration changes are
97 this command. No special environment variables or configuration changes are
98 necessary to run `hg` with Python 3.
98 necessary to run `hg` with Python 3.
99
99
100 See https://www.mercurial-scm.org/wiki/Python3 for more on Mercurial's
100 See https://www.mercurial-scm.org/wiki/Python3 for more on Mercurial's
101 Python 3 support.
101 Python 3 support.
102 """.format(py='.'.join('%d' % x for x in sys.version_info[0:2]))
102 """.format(py='.'.join('%d' % x for x in sys.version_info[0:2]))
103
103
104 printf(error, file=sys.stderr)
104 printf(error, file=sys.stderr)
105 sys.exit(1)
105 sys.exit(1)
106
106
107 # Solaris Python packaging brain damage
107 # Solaris Python packaging brain damage
108 try:
108 try:
109 import hashlib
109 import hashlib
110 sha = hashlib.sha1()
110 sha = hashlib.sha1()
111 except ImportError:
111 except ImportError:
112 try:
112 try:
113 import sha
113 import sha
114 sha.sha # silence unused import warning
114 sha.sha # silence unused import warning
115 except ImportError:
115 except ImportError:
116 raise SystemExit(
116 raise SystemExit(
117 "Couldn't import standard hashlib (incomplete Python install).")
117 "Couldn't import standard hashlib (incomplete Python install).")
118
118
119 try:
119 try:
120 import zlib
120 import zlib
121 zlib.compressobj # silence unused import warning
121 zlib.compressobj # silence unused import warning
122 except ImportError:
122 except ImportError:
123 raise SystemExit(
123 raise SystemExit(
124 "Couldn't import standard zlib (incomplete Python install).")
124 "Couldn't import standard zlib (incomplete Python install).")
125
125
126 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
126 # The base IronPython distribution (as of 2.7.1) doesn't support bz2
127 isironpython = False
127 isironpython = False
128 try:
128 try:
129 isironpython = (platform.python_implementation()
129 isironpython = (platform.python_implementation()
130 .lower().find("ironpython") != -1)
130 .lower().find("ironpython") != -1)
131 except AttributeError:
131 except AttributeError:
132 pass
132 pass
133
133
134 if isironpython:
134 if isironpython:
135 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
135 sys.stderr.write("warning: IronPython detected (no bz2 support)\n")
136 else:
136 else:
137 try:
137 try:
138 import bz2
138 import bz2
139 bz2.BZ2Compressor # silence unused import warning
139 bz2.BZ2Compressor # silence unused import warning
140 except ImportError:
140 except ImportError:
141 raise SystemExit(
141 raise SystemExit(
142 "Couldn't import standard bz2 (incomplete Python install).")
142 "Couldn't import standard bz2 (incomplete Python install).")
143
143
144 ispypy = "PyPy" in sys.version
144 ispypy = "PyPy" in sys.version
145
145
146 hgrustext = os.environ.get('HGWITHRUSTEXT')
146 hgrustext = os.environ.get('HGWITHRUSTEXT')
147 # TODO record it for proper rebuild upon changes
147 # TODO record it for proper rebuild upon changes
148 # (see mercurial/__modulepolicy__.py)
148 # (see mercurial/__modulepolicy__.py)
149 if hgrustext != 'cpython' and hgrustext is not None:
149 if hgrustext != 'cpython' and hgrustext is not None:
150 hgrustext = 'direct-ffi'
150 hgrustext = 'direct-ffi'
151
151
152 import ctypes
152 import ctypes
153 import errno
153 import errno
154 import stat, subprocess, time
154 import stat, subprocess, time
155 import re
155 import re
156 import shutil
156 import shutil
157 import tempfile
157 import tempfile
158 from distutils import log
158 from distutils import log
159 # We have issues with setuptools on some platforms and builders. Until
159 # We have issues with setuptools on some platforms and builders. Until
160 # those are resolved, setuptools is opt-in except for platforms where
160 # those are resolved, setuptools is opt-in except for platforms where
161 # we don't have issues.
161 # we don't have issues.
162 issetuptools = (os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ)
162 issetuptools = (os.name == 'nt' or 'FORCE_SETUPTOOLS' in os.environ)
163 if issetuptools:
163 if issetuptools:
164 from setuptools import setup
164 from setuptools import setup
165 else:
165 else:
166 from distutils.core import setup
166 from distutils.core import setup
167 from distutils.ccompiler import new_compiler
167 from distutils.ccompiler import new_compiler
168 from distutils.core import Command, Extension
168 from distutils.core import Command, Extension
169 from distutils.dist import Distribution
169 from distutils.dist import Distribution
170 from distutils.command.build import build
170 from distutils.command.build import build
171 from distutils.command.build_ext import build_ext
171 from distutils.command.build_ext import build_ext
172 from distutils.command.build_py import build_py
172 from distutils.command.build_py import build_py
173 from distutils.command.build_scripts import build_scripts
173 from distutils.command.build_scripts import build_scripts
174 from distutils.command.install import install
174 from distutils.command.install import install
175 from distutils.command.install_lib import install_lib
175 from distutils.command.install_lib import install_lib
176 from distutils.command.install_scripts import install_scripts
176 from distutils.command.install_scripts import install_scripts
177 from distutils.spawn import spawn, find_executable
177 from distutils.spawn import spawn, find_executable
178 from distutils import file_util
178 from distutils import file_util
179 from distutils.errors import (
179 from distutils.errors import (
180 CCompilerError,
180 CCompilerError,
181 DistutilsError,
181 DistutilsError,
182 DistutilsExecError,
182 DistutilsExecError,
183 )
183 )
184 from distutils.sysconfig import get_python_inc, get_config_var
184 from distutils.sysconfig import get_python_inc, get_config_var
185 from distutils.version import StrictVersion
185 from distutils.version import StrictVersion
186
186
187 # Explain to distutils.StrictVersion how our release candidates are versioned
187 # Explain to distutils.StrictVersion how our release candidates are versioned
188 StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$')
188 StrictVersion.version_re = re.compile(r'^(\d+)\.(\d+)(\.(\d+))?-?(rc(\d+))?$')
189
189
190 def write_if_changed(path, content):
190 def write_if_changed(path, content):
191 """Write content to a file iff the content hasn't changed."""
191 """Write content to a file iff the content hasn't changed."""
192 if os.path.exists(path):
192 if os.path.exists(path):
193 with open(path, 'rb') as fh:
193 with open(path, 'rb') as fh:
194 current = fh.read()
194 current = fh.read()
195 else:
195 else:
196 current = b''
196 current = b''
197
197
198 if current != content:
198 if current != content:
199 with open(path, 'wb') as fh:
199 with open(path, 'wb') as fh:
200 fh.write(content)
200 fh.write(content)
201
201
202 scripts = ['hg']
202 scripts = ['hg']
203 if os.name == 'nt':
203 if os.name == 'nt':
204 # We remove hg.bat if we are able to build hg.exe.
204 # We remove hg.bat if we are able to build hg.exe.
205 scripts.append('contrib/win32/hg.bat')
205 scripts.append('contrib/win32/hg.bat')
206
206
207 def cancompile(cc, code):
207 def cancompile(cc, code):
208 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
208 tmpdir = tempfile.mkdtemp(prefix='hg-install-')
209 devnull = oldstderr = None
209 devnull = oldstderr = None
210 try:
210 try:
211 fname = os.path.join(tmpdir, 'testcomp.c')
211 fname = os.path.join(tmpdir, 'testcomp.c')
212 f = open(fname, 'w')
212 f = open(fname, 'w')
213 f.write(code)
213 f.write(code)
214 f.close()
214 f.close()
215 # Redirect stderr to /dev/null to hide any error messages
215 # Redirect stderr to /dev/null to hide any error messages
216 # from the compiler.
216 # from the compiler.
217 # This will have to be changed if we ever have to check
217 # This will have to be changed if we ever have to check
218 # for a function on Windows.
218 # for a function on Windows.
219 devnull = open('/dev/null', 'w')
219 devnull = open('/dev/null', 'w')
220 oldstderr = os.dup(sys.stderr.fileno())
220 oldstderr = os.dup(sys.stderr.fileno())
221 os.dup2(devnull.fileno(), sys.stderr.fileno())
221 os.dup2(devnull.fileno(), sys.stderr.fileno())
222 objects = cc.compile([fname], output_dir=tmpdir)
222 objects = cc.compile([fname], output_dir=tmpdir)
223 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
223 cc.link_executable(objects, os.path.join(tmpdir, "a.out"))
224 return True
224 return True
225 except Exception:
225 except Exception:
226 return False
226 return False
227 finally:
227 finally:
228 if oldstderr is not None:
228 if oldstderr is not None:
229 os.dup2(oldstderr, sys.stderr.fileno())
229 os.dup2(oldstderr, sys.stderr.fileno())
230 if devnull is not None:
230 if devnull is not None:
231 devnull.close()
231 devnull.close()
232 shutil.rmtree(tmpdir)
232 shutil.rmtree(tmpdir)
233
233
234 # simplified version of distutils.ccompiler.CCompiler.has_function
234 # simplified version of distutils.ccompiler.CCompiler.has_function
235 # that actually removes its temporary files.
235 # that actually removes its temporary files.
236 def hasfunction(cc, funcname):
236 def hasfunction(cc, funcname):
237 code = 'int main(void) { %s(); }\n' % funcname
237 code = 'int main(void) { %s(); }\n' % funcname
238 return cancompile(cc, code)
238 return cancompile(cc, code)
239
239
240 def hasheader(cc, headername):
240 def hasheader(cc, headername):
241 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
241 code = '#include <%s>\nint main(void) { return 0; }\n' % headername
242 return cancompile(cc, code)
242 return cancompile(cc, code)
243
243
244 # py2exe needs to be installed to work
244 # py2exe needs to be installed to work
245 try:
245 try:
246 import py2exe
246 import py2exe
247 py2exe.Distribution # silence unused import warning
247 py2exe.Distribution # silence unused import warning
248 py2exeloaded = True
248 py2exeloaded = True
249 # import py2exe's patched Distribution class
249 # import py2exe's patched Distribution class
250 from distutils.core import Distribution
250 from distutils.core import Distribution
251 except ImportError:
251 except ImportError:
252 py2exeloaded = False
252 py2exeloaded = False
253
253
254 def runcmd(cmd, env, cwd=None):
254 def runcmd(cmd, env, cwd=None):
255 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
255 p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
256 stderr=subprocess.PIPE, env=env, cwd=cwd)
256 stderr=subprocess.PIPE, env=env, cwd=cwd)
257 out, err = p.communicate()
257 out, err = p.communicate()
258 return p.returncode, out, err
258 return p.returncode, out, err
259
259
260 class hgcommand(object):
260 class hgcommand(object):
261 def __init__(self, cmd, env):
261 def __init__(self, cmd, env):
262 self.cmd = cmd
262 self.cmd = cmd
263 self.env = env
263 self.env = env
264
264
265 def run(self, args):
265 def run(self, args):
266 cmd = self.cmd + args
266 cmd = self.cmd + args
267 returncode, out, err = runcmd(cmd, self.env)
267 returncode, out, err = runcmd(cmd, self.env)
268 err = filterhgerr(err)
268 err = filterhgerr(err)
269 if err or returncode != 0:
269 if err or returncode != 0:
270 printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
270 printf("stderr from '%s':" % (' '.join(cmd)), file=sys.stderr)
271 printf(err, file=sys.stderr)
271 printf(err, file=sys.stderr)
272 return ''
272 return ''
273 return out
273 return out
274
274
275 def filterhgerr(err):
275 def filterhgerr(err):
276 # If root is executing setup.py, but the repository is owned by
276 # If root is executing setup.py, but the repository is owned by
277 # another user (as in "sudo python setup.py install") we will get
277 # another user (as in "sudo python setup.py install") we will get
278 # trust warnings since the .hg/hgrc file is untrusted. That is
278 # trust warnings since the .hg/hgrc file is untrusted. That is
279 # fine, we don't want to load it anyway. Python may warn about
279 # fine, we don't want to load it anyway. Python may warn about
280 # a missing __init__.py in mercurial/locale, we also ignore that.
280 # a missing __init__.py in mercurial/locale, we also ignore that.
281 err = [e for e in err.splitlines()
281 err = [e for e in err.splitlines()
282 if (not e.startswith(b'not trusting file')
282 if (not e.startswith(b'not trusting file')
283 and not e.startswith(b'warning: Not importing')
283 and not e.startswith(b'warning: Not importing')
284 and not e.startswith(b'obsolete feature not enabled')
284 and not e.startswith(b'obsolete feature not enabled')
285 and not e.startswith(b'*** failed to import extension')
285 and not e.startswith(b'*** failed to import extension')
286 and not e.startswith(b'devel-warn:')
286 and not e.startswith(b'devel-warn:')
287 and not (e.startswith(b'(third party extension')
287 and not (e.startswith(b'(third party extension')
288 and e.endswith(b'or newer of Mercurial; disabling)')))]
288 and e.endswith(b'or newer of Mercurial; disabling)')))]
289 return b'\n'.join(b' ' + e for e in err)
289 return b'\n'.join(b' ' + e for e in err)
290
290
291 def findhg():
291 def findhg():
292 """Try to figure out how we should invoke hg for examining the local
292 """Try to figure out how we should invoke hg for examining the local
293 repository contents.
293 repository contents.
294
294
295 Returns an hgcommand object."""
295 Returns an hgcommand object."""
296 # By default, prefer the "hg" command in the user's path. This was
296 # By default, prefer the "hg" command in the user's path. This was
297 # presumably the hg command that the user used to create this repository.
297 # presumably the hg command that the user used to create this repository.
298 #
298 #
299 # This repository may require extensions or other settings that would not
299 # This repository may require extensions or other settings that would not
300 # be enabled by running the hg script directly from this local repository.
300 # be enabled by running the hg script directly from this local repository.
301 hgenv = os.environ.copy()
301 hgenv = os.environ.copy()
302 # Use HGPLAIN to disable hgrc settings that would change output formatting,
302 # Use HGPLAIN to disable hgrc settings that would change output formatting,
303 # and disable localization for the same reasons.
303 # and disable localization for the same reasons.
304 hgenv['HGPLAIN'] = '1'
304 hgenv['HGPLAIN'] = '1'
305 hgenv['LANGUAGE'] = 'C'
305 hgenv['LANGUAGE'] = 'C'
306 hgcmd = ['hg']
306 hgcmd = ['hg']
307 # Run a simple "hg log" command just to see if using hg from the user's
307 # Run a simple "hg log" command just to see if using hg from the user's
308 # path works and can successfully interact with this repository. Windows
308 # path works and can successfully interact with this repository. Windows
309 # gives precedence to hg.exe in the current directory, so fall back to the
309 # gives precedence to hg.exe in the current directory, so fall back to the
310 # python invocation of local hg, where pythonXY.dll can always be found.
310 # python invocation of local hg, where pythonXY.dll can always be found.
311 check_cmd = ['log', '-r.', '-Ttest']
311 check_cmd = ['log', '-r.', '-Ttest']
312 if os.name != 'nt':
312 if os.name != 'nt':
313 try:
313 try:
314 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
314 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
315 except EnvironmentError:
315 except EnvironmentError:
316 retcode = -1
316 retcode = -1
317 if retcode == 0 and not filterhgerr(err):
317 if retcode == 0 and not filterhgerr(err):
318 return hgcommand(hgcmd, hgenv)
318 return hgcommand(hgcmd, hgenv)
319
319
320 # Fall back to trying the local hg installation.
320 # Fall back to trying the local hg installation.
321 hgenv = localhgenv()
321 hgenv = localhgenv()
322 hgcmd = [sys.executable, 'hg']
322 hgcmd = [sys.executable, 'hg']
323 try:
323 try:
324 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
324 retcode, out, err = runcmd(hgcmd + check_cmd, hgenv)
325 except EnvironmentError:
325 except EnvironmentError:
326 retcode = -1
326 retcode = -1
327 if retcode == 0 and not filterhgerr(err):
327 if retcode == 0 and not filterhgerr(err):
328 return hgcommand(hgcmd, hgenv)
328 return hgcommand(hgcmd, hgenv)
329
329
330 raise SystemExit('Unable to find a working hg binary to extract the '
330 raise SystemExit('Unable to find a working hg binary to extract the '
331 'version from the repository tags')
331 'version from the repository tags')
332
332
333 def localhgenv():
333 def localhgenv():
334 """Get an environment dictionary to use for invoking or importing
334 """Get an environment dictionary to use for invoking or importing
335 mercurial from the local repository."""
335 mercurial from the local repository."""
336 # Execute hg out of this directory with a custom environment which takes
336 # Execute hg out of this directory with a custom environment which takes
337 # care to not use any hgrc files and do no localization.
337 # care to not use any hgrc files and do no localization.
338 env = {'HGMODULEPOLICY': 'py',
338 env = {'HGMODULEPOLICY': 'py',
339 'HGRCPATH': '',
339 'HGRCPATH': '',
340 'LANGUAGE': 'C',
340 'LANGUAGE': 'C',
341 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
341 'PATH': ''} # make pypi modules that use os.environ['PATH'] happy
342 if 'LD_LIBRARY_PATH' in os.environ:
342 if 'LD_LIBRARY_PATH' in os.environ:
343 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
343 env['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH']
344 if 'SystemRoot' in os.environ:
344 if 'SystemRoot' in os.environ:
345 # SystemRoot is required by Windows to load various DLLs. See:
345 # SystemRoot is required by Windows to load various DLLs. See:
346 # https://bugs.python.org/issue13524#msg148850
346 # https://bugs.python.org/issue13524#msg148850
347 env['SystemRoot'] = os.environ['SystemRoot']
347 env['SystemRoot'] = os.environ['SystemRoot']
348 return env
348 return env
349
349
350 version = ''
350 version = ''
351
351
352 if os.path.isdir('.hg'):
352 if os.path.isdir('.hg'):
353 hg = findhg()
353 hg = findhg()
354 cmd = ['log', '-r', '.', '--template', '{tags}\n']
354 cmd = ['log', '-r', '.', '--template', '{tags}\n']
355 numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()]
355 numerictags = [t for t in sysstr(hg.run(cmd)).split() if t[0:1].isdigit()]
356 hgid = sysstr(hg.run(['id', '-i'])).strip()
356 hgid = sysstr(hg.run(['id', '-i'])).strip()
357 if not hgid:
357 if not hgid:
358 # Bail out if hg is having problems interacting with this repository,
358 # Bail out if hg is having problems interacting with this repository,
359 # rather than falling through and producing a bogus version number.
359 # rather than falling through and producing a bogus version number.
360 # Continuing with an invalid version number will break extensions
360 # Continuing with an invalid version number will break extensions
361 # that define minimumhgversion.
361 # that define minimumhgversion.
362 raise SystemExit('Unable to determine hg version from local repository')
362 raise SystemExit('Unable to determine hg version from local repository')
363 if numerictags: # tag(s) found
363 if numerictags: # tag(s) found
364 version = numerictags[-1]
364 version = numerictags[-1]
365 if hgid.endswith('+'): # propagate the dirty status to the tag
365 if hgid.endswith('+'): # propagate the dirty status to the tag
366 version += '+'
366 version += '+'
367 else: # no tag found
367 else: # no tag found
368 ltagcmd = ['parents', '--template', '{latesttag}']
368 ltagcmd = ['parents', '--template', '{latesttag}']
369 ltag = sysstr(hg.run(ltagcmd))
369 ltag = sysstr(hg.run(ltagcmd))
370 changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
370 changessincecmd = ['log', '-T', 'x\n', '-r', "only(.,'%s')" % ltag]
371 changessince = len(hg.run(changessincecmd).splitlines())
371 changessince = len(hg.run(changessincecmd).splitlines())
372 version = '%s+%s-%s' % (ltag, changessince, hgid)
372 version = '%s+%s-%s' % (ltag, changessince, hgid)
373 if version.endswith('+'):
373 if version.endswith('+'):
374 version += time.strftime('%Y%m%d')
374 version += time.strftime('%Y%m%d')
375 elif os.path.exists('.hg_archival.txt'):
375 elif os.path.exists('.hg_archival.txt'):
376 kw = dict([[t.strip() for t in l.split(':', 1)]
376 kw = dict([[t.strip() for t in l.split(':', 1)]
377 for l in open('.hg_archival.txt')])
377 for l in open('.hg_archival.txt')])
378 if 'tag' in kw:
378 if 'tag' in kw:
379 version = kw['tag']
379 version = kw['tag']
380 elif 'latesttag' in kw:
380 elif 'latesttag' in kw:
381 if 'changessincelatesttag' in kw:
381 if 'changessincelatesttag' in kw:
382 version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
382 version = '%(latesttag)s+%(changessincelatesttag)s-%(node).12s' % kw
383 else:
383 else:
384 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
384 version = '%(latesttag)s+%(latesttagdistance)s-%(node).12s' % kw
385 else:
385 else:
386 version = kw.get('node', '')[:12]
386 version = kw.get('node', '')[:12]
387
387
388 if version:
388 if version:
389 versionb = version
389 versionb = version
390 if not isinstance(versionb, bytes):
390 if not isinstance(versionb, bytes):
391 versionb = versionb.encode('ascii')
391 versionb = versionb.encode('ascii')
392
392
393 write_if_changed('mercurial/__version__.py', b''.join([
393 write_if_changed('mercurial/__version__.py', b''.join([
394 b'# this file is autogenerated by setup.py\n'
394 b'# this file is autogenerated by setup.py\n'
395 b'version = b"%s"\n' % versionb,
395 b'version = b"%s"\n' % versionb,
396 ]))
396 ]))
397
397
398 try:
398 try:
399 oldpolicy = os.environ.get('HGMODULEPOLICY', None)
399 oldpolicy = os.environ.get('HGMODULEPOLICY', None)
400 os.environ['HGMODULEPOLICY'] = 'py'
400 os.environ['HGMODULEPOLICY'] = 'py'
401 from mercurial import __version__
401 from mercurial import __version__
402 version = __version__.version
402 version = __version__.version
403 except ImportError:
403 except ImportError:
404 version = b'unknown'
404 version = b'unknown'
405 finally:
405 finally:
406 if oldpolicy is None:
406 if oldpolicy is None:
407 del os.environ['HGMODULEPOLICY']
407 del os.environ['HGMODULEPOLICY']
408 else:
408 else:
409 os.environ['HGMODULEPOLICY'] = oldpolicy
409 os.environ['HGMODULEPOLICY'] = oldpolicy
410
410
411 class hgbuild(build):
411 class hgbuild(build):
412 # Insert hgbuildmo first so that files in mercurial/locale/ are found
412 # Insert hgbuildmo first so that files in mercurial/locale/ are found
413 # when build_py is run next.
413 # when build_py is run next.
414 sub_commands = [('build_mo', None)] + build.sub_commands
414 sub_commands = [('build_mo', None)] + build.sub_commands
415
415
416 class hgbuildmo(build):
416 class hgbuildmo(build):
417
417
418 description = "build translations (.mo files)"
418 description = "build translations (.mo files)"
419
419
420 def run(self):
420 def run(self):
421 if not find_executable('msgfmt'):
421 if not find_executable('msgfmt'):
422 self.warn("could not find msgfmt executable, no translations "
422 self.warn("could not find msgfmt executable, no translations "
423 "will be built")
423 "will be built")
424 return
424 return
425
425
426 podir = 'i18n'
426 podir = 'i18n'
427 if not os.path.isdir(podir):
427 if not os.path.isdir(podir):
428 self.warn("could not find %s/ directory" % podir)
428 self.warn("could not find %s/ directory" % podir)
429 return
429 return
430
430
431 join = os.path.join
431 join = os.path.join
432 for po in os.listdir(podir):
432 for po in os.listdir(podir):
433 if not po.endswith('.po'):
433 if not po.endswith('.po'):
434 continue
434 continue
435 pofile = join(podir, po)
435 pofile = join(podir, po)
436 modir = join('locale', po[:-3], 'LC_MESSAGES')
436 modir = join('locale', po[:-3], 'LC_MESSAGES')
437 mofile = join(modir, 'hg.mo')
437 mofile = join(modir, 'hg.mo')
438 mobuildfile = join('mercurial', mofile)
438 mobuildfile = join('mercurial', mofile)
439 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
439 cmd = ['msgfmt', '-v', '-o', mobuildfile, pofile]
440 if sys.platform != 'sunos5':
440 if sys.platform != 'sunos5':
441 # msgfmt on Solaris does not know about -c
441 # msgfmt on Solaris does not know about -c
442 cmd.append('-c')
442 cmd.append('-c')
443 self.mkpath(join('mercurial', modir))
443 self.mkpath(join('mercurial', modir))
444 self.make_file([pofile], mobuildfile, spawn, (cmd,))
444 self.make_file([pofile], mobuildfile, spawn, (cmd,))
445
445
446
446
447 class hgdist(Distribution):
447 class hgdist(Distribution):
448 pure = False
448 pure = False
449 cffi = ispypy
449 cffi = ispypy
450
450
451 global_options = Distribution.global_options + [
451 global_options = Distribution.global_options + [
452 ('pure', None, "use pure (slow) Python code instead of C extensions"),
452 ('pure', None, "use pure (slow) Python code instead of C extensions"),
453 ]
453 ]
454
454
455 def has_ext_modules(self):
455 def has_ext_modules(self):
456 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
456 # self.ext_modules is emptied in hgbuildpy.finalize_options which is
457 # too late for some cases
457 # too late for some cases
458 return not self.pure and Distribution.has_ext_modules(self)
458 return not self.pure and Distribution.has_ext_modules(self)
459
459
460 # This is ugly as a one-liner. So use a variable.
460 # This is ugly as a one-liner. So use a variable.
461 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
461 buildextnegops = dict(getattr(build_ext, 'negative_options', {}))
462 buildextnegops['no-zstd'] = 'zstd'
462 buildextnegops['no-zstd'] = 'zstd'
463
463
464 class hgbuildext(build_ext):
464 class hgbuildext(build_ext):
465 user_options = build_ext.user_options + [
465 user_options = build_ext.user_options + [
466 ('zstd', None, 'compile zstd bindings [default]'),
466 ('zstd', None, 'compile zstd bindings [default]'),
467 ('no-zstd', None, 'do not compile zstd bindings'),
467 ('no-zstd', None, 'do not compile zstd bindings'),
468 ]
468 ]
469
469
470 boolean_options = build_ext.boolean_options + ['zstd']
470 boolean_options = build_ext.boolean_options + ['zstd']
471 negative_opt = buildextnegops
471 negative_opt = buildextnegops
472
472
473 def initialize_options(self):
473 def initialize_options(self):
474 self.zstd = True
474 self.zstd = True
475 return build_ext.initialize_options(self)
475 return build_ext.initialize_options(self)
476
476
477 def build_extensions(self):
477 def build_extensions(self):
478 ruststandalones = [e for e in self.extensions
478 ruststandalones = [e for e in self.extensions
479 if isinstance(e, RustStandaloneExtension)]
479 if isinstance(e, RustStandaloneExtension)]
480 self.extensions = [e for e in self.extensions
480 self.extensions = [e for e in self.extensions
481 if e not in ruststandalones]
481 if e not in ruststandalones]
482 # Filter out zstd if disabled via argument.
482 # Filter out zstd if disabled via argument.
483 if not self.zstd:
483 if not self.zstd:
484 self.extensions = [e for e in self.extensions
484 self.extensions = [e for e in self.extensions
485 if e.name != 'mercurial.zstd']
485 if e.name != 'mercurial.zstd']
486
486
487 for rustext in ruststandalones:
487 for rustext in ruststandalones:
488 rustext.build('' if self.inplace else self.build_lib)
488 rustext.build('' if self.inplace else self.build_lib)
489
489
490 return build_ext.build_extensions(self)
490 return build_ext.build_extensions(self)
491
491
492 def build_extension(self, ext):
492 def build_extension(self, ext):
493 if isinstance(ext, RustExtension):
493 if isinstance(ext, RustExtension):
494 ext.rustbuild()
494 ext.rustbuild()
495 try:
495 try:
496 build_ext.build_extension(self, ext)
496 build_ext.build_extension(self, ext)
497 except CCompilerError:
497 except CCompilerError:
498 if not getattr(ext, 'optional', False):
498 if not getattr(ext, 'optional', False):
499 raise
499 raise
500 log.warn("Failed to build optional extension '%s' (skipping)",
500 log.warn("Failed to build optional extension '%s' (skipping)",
501 ext.name)
501 ext.name)
502
502
503 class hgbuildscripts(build_scripts):
503 class hgbuildscripts(build_scripts):
504 def run(self):
504 def run(self):
505 if os.name != 'nt' or self.distribution.pure:
505 if os.name != 'nt' or self.distribution.pure:
506 return build_scripts.run(self)
506 return build_scripts.run(self)
507
507
508 exebuilt = False
508 exebuilt = False
509 try:
509 try:
510 self.run_command('build_hgexe')
510 self.run_command('build_hgexe')
511 exebuilt = True
511 exebuilt = True
512 except (DistutilsError, CCompilerError):
512 except (DistutilsError, CCompilerError):
513 log.warn('failed to build optional hg.exe')
513 log.warn('failed to build optional hg.exe')
514
514
515 if exebuilt:
515 if exebuilt:
516 # Copying hg.exe to the scripts build directory ensures it is
516 # Copying hg.exe to the scripts build directory ensures it is
517 # installed by the install_scripts command.
517 # installed by the install_scripts command.
518 hgexecommand = self.get_finalized_command('build_hgexe')
518 hgexecommand = self.get_finalized_command('build_hgexe')
519 dest = os.path.join(self.build_dir, 'hg.exe')
519 dest = os.path.join(self.build_dir, 'hg.exe')
520 self.mkpath(self.build_dir)
520 self.mkpath(self.build_dir)
521 self.copy_file(hgexecommand.hgexepath, dest)
521 self.copy_file(hgexecommand.hgexepath, dest)
522
522
523 # Remove hg.bat because it is redundant with hg.exe.
523 # Remove hg.bat because it is redundant with hg.exe.
524 self.scripts.remove('contrib/win32/hg.bat')
524 self.scripts.remove('contrib/win32/hg.bat')
525
525
526 return build_scripts.run(self)
526 return build_scripts.run(self)
527
527
528 class hgbuildpy(build_py):
528 class hgbuildpy(build_py):
529 def finalize_options(self):
529 def finalize_options(self):
530 build_py.finalize_options(self)
530 build_py.finalize_options(self)
531
531
532 if self.distribution.pure:
532 if self.distribution.pure:
533 self.distribution.ext_modules = []
533 self.distribution.ext_modules = []
534 elif self.distribution.cffi:
534 elif self.distribution.cffi:
535 from mercurial.cffi import (
535 from mercurial.cffi import (
536 bdiffbuild,
536 bdiffbuild,
537 mpatchbuild,
537 mpatchbuild,
538 )
538 )
539 exts = [mpatchbuild.ffi.distutils_extension(),
539 exts = [mpatchbuild.ffi.distutils_extension(),
540 bdiffbuild.ffi.distutils_extension()]
540 bdiffbuild.ffi.distutils_extension()]
541 # cffi modules go here
541 # cffi modules go here
542 if sys.platform == 'darwin':
542 if sys.platform == 'darwin':
543 from mercurial.cffi import osutilbuild
543 from mercurial.cffi import osutilbuild
544 exts.append(osutilbuild.ffi.distutils_extension())
544 exts.append(osutilbuild.ffi.distutils_extension())
545 self.distribution.ext_modules = exts
545 self.distribution.ext_modules = exts
546 else:
546 else:
547 h = os.path.join(get_python_inc(), 'Python.h')
547 h = os.path.join(get_python_inc(), 'Python.h')
548 if not os.path.exists(h):
548 if not os.path.exists(h):
549 raise SystemExit('Python headers are required to build '
549 raise SystemExit('Python headers are required to build '
550 'Mercurial but weren\'t found in %s' % h)
550 'Mercurial but weren\'t found in %s' % h)
551
551
552 def run(self):
552 def run(self):
553 basepath = os.path.join(self.build_lib, 'mercurial')
553 basepath = os.path.join(self.build_lib, 'mercurial')
554 self.mkpath(basepath)
554 self.mkpath(basepath)
555
555
556 if self.distribution.pure:
556 if self.distribution.pure:
557 modulepolicy = 'py'
557 modulepolicy = 'py'
558 elif self.build_lib == '.':
558 elif self.build_lib == '.':
559 # in-place build should run without rebuilding C extensions
559 # in-place build should run without rebuilding C
560 modulepolicy = 'allow'
560 # and Rust extensions
561 if hgrustext == 'cpython':
562 modulepolicy = 'rust+c-allow'
563 else:
564 modulepolicy = 'allow'
561 else:
565 else:
562 modulepolicy = 'c'
566 if hgrustext == 'cpython':
567 modulepolicy = 'rust+c'
568 else:
569 modulepolicy = 'c'
563
570
564 content = b''.join([
571 content = b''.join([
565 b'# this file is autogenerated by setup.py\n',
572 b'# this file is autogenerated by setup.py\n',
566 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
573 b'modulepolicy = b"%s"\n' % modulepolicy.encode('ascii'),
567 ])
574 ])
568 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'),
575 write_if_changed(os.path.join(basepath, '__modulepolicy__.py'),
569 content)
576 content)
570
577
571 build_py.run(self)
578 build_py.run(self)
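For reference (an editorial sketch derived from the code above, not part of the patch): with HGWITHRUSTEXT=cpython set and a regular, non in-place build, run() writes a mercurial/__modulepolicy__.py of the form:

# this file is autogenerated by setup.py
modulepolicy = b"rust+c"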
572
579
573 class buildhgextindex(Command):
580 class buildhgextindex(Command):
574 description = 'generate prebuilt index of hgext (for frozen package)'
581 description = 'generate prebuilt index of hgext (for frozen package)'
575 user_options = []
582 user_options = []
576 _indexfilename = 'hgext/__index__.py'
583 _indexfilename = 'hgext/__index__.py'
577
584
578 def initialize_options(self):
585 def initialize_options(self):
579 pass
586 pass
580
587
581 def finalize_options(self):
588 def finalize_options(self):
582 pass
589 pass
583
590
584 def run(self):
591 def run(self):
585 if os.path.exists(self._indexfilename):
592 if os.path.exists(self._indexfilename):
586 with open(self._indexfilename, 'w') as f:
593 with open(self._indexfilename, 'w') as f:
587 f.write('# empty\n')
594 f.write('# empty\n')
588
595
589 # with no extensions enabled, disabled() lists everything
596 # with no extensions enabled, disabled() lists everything
590 code = ('import pprint; from mercurial import extensions; '
597 code = ('import pprint; from mercurial import extensions; '
591 'pprint.pprint(extensions.disabled())')
598 'pprint.pprint(extensions.disabled())')
592 returncode, out, err = runcmd([sys.executable, '-c', code],
599 returncode, out, err = runcmd([sys.executable, '-c', code],
593 localhgenv())
600 localhgenv())
594 if err or returncode != 0:
601 if err or returncode != 0:
595 raise DistutilsExecError(err)
602 raise DistutilsExecError(err)
596
603
597 with open(self._indexfilename, 'wb') as f:
604 with open(self._indexfilename, 'wb') as f:
598 f.write(b'# this file is autogenerated by setup.py\n')
605 f.write(b'# this file is autogenerated by setup.py\n')
599 f.write(b'docs = ')
606 f.write(b'docs = ')
600 f.write(out)
607 f.write(out)
601
608
602 class buildhgexe(build_ext):
609 class buildhgexe(build_ext):
603 description = 'compile hg.exe from mercurial/exewrapper.c'
610 description = 'compile hg.exe from mercurial/exewrapper.c'
604 user_options = build_ext.user_options + [
611 user_options = build_ext.user_options + [
605 ('long-paths-support', None, 'enable support for long paths on '
612 ('long-paths-support', None, 'enable support for long paths on '
606 'Windows (off by default and '
613 'Windows (off by default and '
607 'experimental)'),
614 'experimental)'),
608 ]
615 ]
609
616
610 LONG_PATHS_MANIFEST = """
617 LONG_PATHS_MANIFEST = """
611 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
618 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
612 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
619 <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
613 <application>
620 <application>
614 <windowsSettings
621 <windowsSettings
615 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
622 xmlns:ws2="http://schemas.microsoft.com/SMI/2016/WindowsSettings">
616 <ws2:longPathAware>true</ws2:longPathAware>
623 <ws2:longPathAware>true</ws2:longPathAware>
617 </windowsSettings>
624 </windowsSettings>
618 </application>
625 </application>
619 </assembly>"""
626 </assembly>"""
620
627
621 def initialize_options(self):
628 def initialize_options(self):
622 build_ext.initialize_options(self)
629 build_ext.initialize_options(self)
623 self.long_paths_support = False
630 self.long_paths_support = False
624
631
625 def build_extensions(self):
632 def build_extensions(self):
626 if os.name != 'nt':
633 if os.name != 'nt':
627 return
634 return
628 if isinstance(self.compiler, HackedMingw32CCompiler):
635 if isinstance(self.compiler, HackedMingw32CCompiler):
629 self.compiler.compiler_so = self.compiler.compiler # no -mdll
636 self.compiler.compiler_so = self.compiler.compiler # no -mdll
630 self.compiler.dll_libraries = [] # no -lmsrvc90
637 self.compiler.dll_libraries = [] # no -lmsrvc90
631
638
632 # Different Python installs can have different Python library
639 # Different Python installs can have different Python library
633 # names. e.g. the official CPython distribution uses pythonXY.dll
640 # names. e.g. the official CPython distribution uses pythonXY.dll
634 # and MinGW uses libpythonX.Y.dll.
641 # and MinGW uses libpythonX.Y.dll.
635 _kernel32 = ctypes.windll.kernel32
642 _kernel32 = ctypes.windll.kernel32
636 _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
643 _kernel32.GetModuleFileNameA.argtypes = [ctypes.c_void_p,
637 ctypes.c_void_p,
644 ctypes.c_void_p,
638 ctypes.c_ulong]
645 ctypes.c_ulong]
639 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
646 _kernel32.GetModuleFileNameA.restype = ctypes.c_ulong
640 size = 1000
647 size = 1000
641 buf = ctypes.create_string_buffer(size + 1)
648 buf = ctypes.create_string_buffer(size + 1)
642 filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
649 filelen = _kernel32.GetModuleFileNameA(sys.dllhandle, ctypes.byref(buf),
643 size)
650 size)
644
651
645 if filelen > 0 and filelen != size:
652 if filelen > 0 and filelen != size:
646 dllbasename = os.path.basename(buf.value)
653 dllbasename = os.path.basename(buf.value)
647 if not dllbasename.lower().endswith(b'.dll'):
654 if not dllbasename.lower().endswith(b'.dll'):
648 raise SystemExit('Python DLL does not end with .dll: %s' %
655 raise SystemExit('Python DLL does not end with .dll: %s' %
649 dllbasename)
656 dllbasename)
650 pythonlib = dllbasename[:-4]
657 pythonlib = dllbasename[:-4]
651 else:
658 else:
652 log.warn('could not determine Python DLL filename; '
659 log.warn('could not determine Python DLL filename; '
653 'assuming pythonXY')
660 'assuming pythonXY')
654
661
655 hv = sys.hexversion
662 hv = sys.hexversion
656 pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
663 pythonlib = 'python%d%d' % (hv >> 24, (hv >> 16) & 0xff)
657
664
658 log.info('using %s as Python library name' % pythonlib)
665 log.info('using %s as Python library name' % pythonlib)
659 with open('mercurial/hgpythonlib.h', 'wb') as f:
666 with open('mercurial/hgpythonlib.h', 'wb') as f:
660 f.write(b'/* this file is autogenerated by setup.py */\n')
667 f.write(b'/* this file is autogenerated by setup.py */\n')
661 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
668 f.write(b'#define HGPYTHONLIB "%s"\n' % pythonlib)
662
669
663 macros = None
670 macros = None
664 if sys.version_info[0] >= 3:
671 if sys.version_info[0] >= 3:
665 macros = [('_UNICODE', None), ('UNICODE', None)]
672 macros = [('_UNICODE', None), ('UNICODE', None)]
666
673
667 objects = self.compiler.compile(['mercurial/exewrapper.c'],
674 objects = self.compiler.compile(['mercurial/exewrapper.c'],
668 output_dir=self.build_temp,
675 output_dir=self.build_temp,
669 macros=macros)
676 macros=macros)
670 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
677 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
671 self.hgtarget = os.path.join(dir, 'hg')
678 self.hgtarget = os.path.join(dir, 'hg')
672 self.compiler.link_executable(objects, self.hgtarget,
679 self.compiler.link_executable(objects, self.hgtarget,
673 libraries=[],
680 libraries=[],
674 output_dir=self.build_temp)
681 output_dir=self.build_temp)
675 if self.long_paths_support:
682 if self.long_paths_support:
676 self.addlongpathsmanifest()
683 self.addlongpathsmanifest()
677
684
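To make the pythonXY fallback above concrete, here is a small worked example (illustrative only, not part of setup.py) of how the library name is derived from sys.hexversion: the major version sits in the top byte and the minor version in the next one.

    import sys

    hv = sys.hexversion              # e.g. 0x020710f0 on CPython 2.7.16
    major = hv >> 24                 # top byte  -> 2
    minor = (hv >> 16) & 0xff        # next byte -> 7
    print('python%d%d' % (major, minor))   # -> python27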
678 def addlongpathsmanifest(self):
685 def addlongpathsmanifest(self):
679 r"""Add manifest pieces so that hg.exe understands long paths
686 r"""Add manifest pieces so that hg.exe understands long paths
680
687
681 This is an EXPERIMENTAL feature, use with care.
688 This is an EXPERIMENTAL feature, use with care.
682 To enable long paths support, one needs to do two things:
689 To enable long paths support, one needs to do two things:
683 - build Mercurial with --long-paths-support option
690 - build Mercurial with --long-paths-support option
684 - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
691 - change HKLM\SYSTEM\CurrentControlSet\Control\FileSystem\
685 LongPathsEnabled to have value 1.
692 LongPathsEnabled to have value 1.
686
693
687 Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
694 Please ignore 'warning 81010002: Unrecognized Element "longPathAware"';
688 it happens because Mercurial uses mt.exe circa 2008, which is not
695 it happens because Mercurial uses mt.exe circa 2008, which is not
689 yet aware of long paths support in the manifest (I think so at least).
696 yet aware of long paths support in the manifest (I think so at least).
690 This does not stop mt.exe from embedding/merging the XML properly.
697 This does not stop mt.exe from embedding/merging the XML properly.
691
698
692 Why should resource #1 be used for .exe manifests? I don't know and
699 Why should resource #1 be used for .exe manifests? I don't know and
693 wasn't able to find an explanation for mortals. But it seems to work.
700 wasn't able to find an explanation for mortals. But it seems to work.
694 """
701 """
695 exefname = self.compiler.executable_filename(self.hgtarget)
702 exefname = self.compiler.executable_filename(self.hgtarget)
696 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
703 fdauto, manfname = tempfile.mkstemp(suffix='.hg.exe.manifest')
697 os.close(fdauto)
704 os.close(fdauto)
698 with open(manfname, 'w') as f:
705 with open(manfname, 'w') as f:
699 f.write(self.LONG_PATHS_MANIFEST)
706 f.write(self.LONG_PATHS_MANIFEST)
700 log.info("long paths manifest is written to '%s'" % manfname)
707 log.info("long paths manifest is written to '%s'" % manfname)
701 inputresource = '-inputresource:%s;#1' % exefname
708 inputresource = '-inputresource:%s;#1' % exefname
702 outputresource = '-outputresource:%s;#1' % exefname
709 outputresource = '-outputresource:%s;#1' % exefname
703 log.info("running mt.exe to update hg.exe's manifest in-place")
710 log.info("running mt.exe to update hg.exe's manifest in-place")
704 # supplying both -manifest and -inputresource to mt.exe makes
711 # supplying both -manifest and -inputresource to mt.exe makes
705 # it merge the embedded and supplied manifests in the -outputresource
712 # it merge the embedded and supplied manifests in the -outputresource
706 self.spawn(['mt.exe', '-nologo', '-manifest', manfname,
713 self.spawn(['mt.exe', '-nologo', '-manifest', manfname,
707 inputresource, outputresource])
714 inputresource, outputresource])
708 log.info("done updating hg.exe's manifest")
715 log.info("done updating hg.exe's manifest")
709 os.remove(manfname)
716 os.remove(manfname)
710
717
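As a companion to the docstring above, a minimal sketch (not part of setup.py) of how one could check the LongPathsEnabled registry value it mentions, using the standard-library winreg module on Windows:

    import winreg  # '_winreg' on Python 2

    KEY = r'SYSTEM\CurrentControlSet\Control\FileSystem'
    with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, KEY) as k:
        value, _type = winreg.QueryValueEx(k, 'LongPathsEnabled')
    print('long paths enabled:', value == 1)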
711 @property
718 @property
712 def hgexepath(self):
719 def hgexepath(self):
713 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
720 dir = os.path.dirname(self.get_ext_fullpath('dummy'))
714 return os.path.join(self.build_temp, dir, 'hg.exe')
721 return os.path.join(self.build_temp, dir, 'hg.exe')
715
722
716 class hgbuilddoc(Command):
723 class hgbuilddoc(Command):
717 description = 'build documentation'
724 description = 'build documentation'
718 user_options = [
725 user_options = [
719 ('man', None, 'generate man pages'),
726 ('man', None, 'generate man pages'),
720 ('html', None, 'generate html pages'),
727 ('html', None, 'generate html pages'),
721 ]
728 ]
722
729
723 def initialize_options(self):
730 def initialize_options(self):
724 self.man = None
731 self.man = None
725 self.html = None
732 self.html = None
726
733
727 def finalize_options(self):
734 def finalize_options(self):
728 # If --man or --html are set, only generate what we're told to.
735 # If --man or --html are set, only generate what we're told to.
729 # Otherwise generate everything.
736 # Otherwise generate everything.
730 have_subset = self.man is not None or self.html is not None
737 have_subset = self.man is not None or self.html is not None
731
738
732 if have_subset:
739 if have_subset:
733 self.man = True if self.man else False
740 self.man = True if self.man else False
734 self.html = True if self.html else False
741 self.html = True if self.html else False
735 else:
742 else:
736 self.man = True
743 self.man = True
737 self.html = True
744 self.html = True
738
745
739 def run(self):
746 def run(self):
740 def normalizecrlf(p):
747 def normalizecrlf(p):
741 with open(p, 'rb') as fh:
748 with open(p, 'rb') as fh:
742 orig = fh.read()
749 orig = fh.read()
743
750
744 if b'\r\n' not in orig:
751 if b'\r\n' not in orig:
745 return
752 return
746
753
747 log.info('normalizing %s to LF line endings' % p)
754 log.info('normalizing %s to LF line endings' % p)
748 with open(p, 'wb') as fh:
755 with open(p, 'wb') as fh:
749 fh.write(orig.replace(b'\r\n', b'\n'))
756 fh.write(orig.replace(b'\r\n', b'\n'))
750
757
751 def gentxt(root):
758 def gentxt(root):
752 txt = 'doc/%s.txt' % root
759 txt = 'doc/%s.txt' % root
753 log.info('generating %s' % txt)
760 log.info('generating %s' % txt)
754 res, out, err = runcmd(
761 res, out, err = runcmd(
755 [sys.executable, 'gendoc.py', root],
762 [sys.executable, 'gendoc.py', root],
756 os.environ,
763 os.environ,
757 cwd='doc')
764 cwd='doc')
758 if res:
765 if res:
759 raise SystemExit('error running gendoc.py: %s' %
766 raise SystemExit('error running gendoc.py: %s' %
760 '\n'.join([out, err]))
767 '\n'.join([out, err]))
761
768
762 with open(txt, 'wb') as fh:
769 with open(txt, 'wb') as fh:
763 fh.write(out)
770 fh.write(out)
764
771
765 def gengendoc(root):
772 def gengendoc(root):
766 gendoc = 'doc/%s.gendoc.txt' % root
773 gendoc = 'doc/%s.gendoc.txt' % root
767
774
768 log.info('generating %s' % gendoc)
775 log.info('generating %s' % gendoc)
769 res, out, err = runcmd(
776 res, out, err = runcmd(
770 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
777 [sys.executable, 'gendoc.py', '%s.gendoc' % root],
771 os.environ,
778 os.environ,
772 cwd='doc')
779 cwd='doc')
773 if res:
780 if res:
774 raise SystemExit('error running gendoc: %s' %
781 raise SystemExit('error running gendoc: %s' %
775 '\n'.join([out, err]))
782 '\n'.join([out, err]))
776
783
777 with open(gendoc, 'wb') as fh:
784 with open(gendoc, 'wb') as fh:
778 fh.write(out)
785 fh.write(out)
779
786
780 def genman(root):
787 def genman(root):
781 log.info('generating doc/%s' % root)
788 log.info('generating doc/%s' % root)
782 res, out, err = runcmd(
789 res, out, err = runcmd(
783 [sys.executable, 'runrst', 'hgmanpage', '--halt', 'warning',
790 [sys.executable, 'runrst', 'hgmanpage', '--halt', 'warning',
784 '--strip-elements-with-class', 'htmlonly',
791 '--strip-elements-with-class', 'htmlonly',
785 '%s.txt' % root, root],
792 '%s.txt' % root, root],
786 os.environ,
793 os.environ,
787 cwd='doc')
794 cwd='doc')
788 if res:
795 if res:
789 raise SystemExit('error running runrst: %s' %
796 raise SystemExit('error running runrst: %s' %
790 '\n'.join([out, err]))
797 '\n'.join([out, err]))
791
798
792 normalizecrlf('doc/%s' % root)
799 normalizecrlf('doc/%s' % root)
793
800
794 def genhtml(root):
801 def genhtml(root):
795 log.info('generating doc/%s.html' % root)
802 log.info('generating doc/%s.html' % root)
796 res, out, err = runcmd(
803 res, out, err = runcmd(
797 [sys.executable, 'runrst', 'html', '--halt', 'warning',
804 [sys.executable, 'runrst', 'html', '--halt', 'warning',
798 '--link-stylesheet', '--stylesheet-path', 'style.css',
805 '--link-stylesheet', '--stylesheet-path', 'style.css',
799 '%s.txt' % root, '%s.html' % root],
806 '%s.txt' % root, '%s.html' % root],
800 os.environ,
807 os.environ,
801 cwd='doc')
808 cwd='doc')
802 if res:
809 if res:
803 raise SystemExit('error running runrst: %s' %
810 raise SystemExit('error running runrst: %s' %
804 '\n'.join([out, err]))
811 '\n'.join([out, err]))
805
812
806 normalizecrlf('doc/%s.html' % root)
813 normalizecrlf('doc/%s.html' % root)
807
814
808 # This logic is duplicated in doc/Makefile.
815 # This logic is duplicated in doc/Makefile.
809 sources = set(f for f in os.listdir('mercurial/help')
816 sources = set(f for f in os.listdir('mercurial/help')
810 if re.search(r'[0-9]\.txt$', f))
817 if re.search(r'[0-9]\.txt$', f))
811
818
812 # common.txt is a one-off.
819 # common.txt is a one-off.
813 gentxt('common')
820 gentxt('common')
814
821
815 for source in sorted(sources):
822 for source in sorted(sources):
816 assert source[-4:] == '.txt'
823 assert source[-4:] == '.txt'
817 root = source[:-4]
824 root = source[:-4]
818
825
819 gentxt(root)
826 gentxt(root)
820 gengendoc(root)
827 gengendoc(root)
821
828
822 if self.man:
829 if self.man:
823 genman(root)
830 genman(root)
824 if self.html:
831 if self.html:
825 genhtml(root)
832 genhtml(root)
826
833
827 class hginstall(install):
834 class hginstall(install):
828
835
829 user_options = install.user_options + [
836 user_options = install.user_options + [
830 ('old-and-unmanageable', None,
837 ('old-and-unmanageable', None,
831 'noop, present for eggless setuptools compat'),
838 'noop, present for eggless setuptools compat'),
832 ('single-version-externally-managed', None,
839 ('single-version-externally-managed', None,
833 'noop, present for eggless setuptools compat'),
840 'noop, present for eggless setuptools compat'),
834 ]
841 ]
835
842
836 # Also helps setuptools not be sad while we refuse to create eggs.
843 # Also helps setuptools not be sad while we refuse to create eggs.
837 single_version_externally_managed = True
844 single_version_externally_managed = True
838
845
839 def get_sub_commands(self):
846 def get_sub_commands(self):
840 # Screen out egg related commands to prevent egg generation. But allow
847 # Screen out egg related commands to prevent egg generation. But allow
841 # mercurial.egg-info generation, since that is part of modern
848 # mercurial.egg-info generation, since that is part of modern
842 # packaging.
849 # packaging.
843 excl = set(['bdist_egg'])
850 excl = set(['bdist_egg'])
844 return filter(lambda x: x not in excl, install.get_sub_commands(self))
851 return filter(lambda x: x not in excl, install.get_sub_commands(self))
845
852
846 class hginstalllib(install_lib):
853 class hginstalllib(install_lib):
847 '''
854 '''
848 This is a specialization of install_lib that replaces the copy_file used
855 This is a specialization of install_lib that replaces the copy_file used
849 there so that it supports setting the mode of files after copying them,
856 there so that it supports setting the mode of files after copying them,
850 instead of just preserving the mode that the files originally had. If your
857 instead of just preserving the mode that the files originally had. If your
851 system has a umask of something like 027, preserving the permissions when
858 system has a umask of something like 027, preserving the permissions when
852 copying will lead to a broken install.
859 copying will lead to a broken install.
853
860
854 Note that just passing keep_permissions=False to copy_file would be
861 Note that just passing keep_permissions=False to copy_file would be
855 insufficient, as it might still be applying a umask.
862 insufficient, as it might still be applying a umask.
856 '''
863 '''
857
864
858 def run(self):
865 def run(self):
859 realcopyfile = file_util.copy_file
866 realcopyfile = file_util.copy_file
860 def copyfileandsetmode(*args, **kwargs):
867 def copyfileandsetmode(*args, **kwargs):
861 src, dst = args[0], args[1]
868 src, dst = args[0], args[1]
862 dst, copied = realcopyfile(*args, **kwargs)
869 dst, copied = realcopyfile(*args, **kwargs)
863 if copied:
870 if copied:
864 st = os.stat(src)
871 st = os.stat(src)
865 # Persist executable bit (apply it to group and other if user
872 # Persist executable bit (apply it to group and other if user
866 # has it)
873 # has it)
867 if st[stat.ST_MODE] & stat.S_IXUSR:
874 if st[stat.ST_MODE] & stat.S_IXUSR:
868 setmode = int('0755', 8)
875 setmode = int('0755', 8)
869 else:
876 else:
870 setmode = int('0644', 8)
877 setmode = int('0644', 8)
871 m = stat.S_IMODE(st[stat.ST_MODE])
878 m = stat.S_IMODE(st[stat.ST_MODE])
872 m = (m & ~int('0777', 8)) | setmode
879 m = (m & ~int('0777', 8)) | setmode
873 os.chmod(dst, m)
880 os.chmod(dst, m)
874 file_util.copy_file = copyfileandsetmode
881 file_util.copy_file = copyfileandsetmode
875 try:
882 try:
876 install_lib.run(self)
883 install_lib.run(self)
877 finally:
884 finally:
878 file_util.copy_file = realcopyfile
885 file_util.copy_file = realcopyfile
879
886
880 class hginstallscripts(install_scripts):
887 class hginstallscripts(install_scripts):
881 '''
888 '''
882 This is a specialization of install_scripts that replaces the @LIBDIR@ with
889 This is a specialization of install_scripts that replaces the @LIBDIR@ with
883 the configured directory for modules. If possible, the path is made relative
890 the configured directory for modules. If possible, the path is made relative
884 to the directory for scripts.
891 to the directory for scripts.
885 '''
892 '''
886
893
887 def initialize_options(self):
894 def initialize_options(self):
888 install_scripts.initialize_options(self)
895 install_scripts.initialize_options(self)
889
896
890 self.install_lib = None
897 self.install_lib = None
891
898
892 def finalize_options(self):
899 def finalize_options(self):
893 install_scripts.finalize_options(self)
900 install_scripts.finalize_options(self)
894 self.set_undefined_options('install',
901 self.set_undefined_options('install',
895 ('install_lib', 'install_lib'))
902 ('install_lib', 'install_lib'))
896
903
897 def run(self):
904 def run(self):
898 install_scripts.run(self)
905 install_scripts.run(self)
899
906
900 # It only makes sense to replace @LIBDIR@ with the install path if
907 # It only makes sense to replace @LIBDIR@ with the install path if
901 # the install path is known. For wheels, the logic below calculates
908 # the install path is known. For wheels, the logic below calculates
902 # the libdir to be "../..". This is because the internal layout of a
909 # the libdir to be "../..". This is because the internal layout of a
903 # wheel archive looks like:
910 # wheel archive looks like:
904 #
911 #
905 # mercurial-3.6.1.data/scripts/hg
912 # mercurial-3.6.1.data/scripts/hg
906 # mercurial/__init__.py
913 # mercurial/__init__.py
907 #
914 #
908 # When installing wheels, the subdirectories of the "<pkg>.data"
915 # When installing wheels, the subdirectories of the "<pkg>.data"
909 # directory are translated to system local paths and files therein
916 # directory are translated to system local paths and files therein
910 # are copied in place. The mercurial/* files are installed into the
917 # are copied in place. The mercurial/* files are installed into the
911 # site-packages directory. However, the site-packages directory
918 # site-packages directory. However, the site-packages directory
912 # isn't known until wheel install time. This means we have no clue
919 # isn't known until wheel install time. This means we have no clue
913 # at wheel generation time what the installed site-packages directory
920 # at wheel generation time what the installed site-packages directory
914 # will be. And, wheels don't appear to provide the ability to register
921 # will be. And, wheels don't appear to provide the ability to register
915 # custom code to run during wheel installation. This all means that
922 # custom code to run during wheel installation. This all means that
916 # we can't reliably set the libdir in wheels: the default behavior
923 # we can't reliably set the libdir in wheels: the default behavior
917 # of looking in sys.path must do.
924 # of looking in sys.path must do.
918
925
919 if (os.path.splitdrive(self.install_dir)[0] !=
926 if (os.path.splitdrive(self.install_dir)[0] !=
920 os.path.splitdrive(self.install_lib)[0]):
927 os.path.splitdrive(self.install_lib)[0]):
921 # can't make relative paths from one drive to another, so use an
928 # can't make relative paths from one drive to another, so use an
922 # absolute path instead
929 # absolute path instead
923 libdir = self.install_lib
930 libdir = self.install_lib
924 else:
931 else:
925 common = os.path.commonprefix((self.install_dir, self.install_lib))
932 common = os.path.commonprefix((self.install_dir, self.install_lib))
926 rest = self.install_dir[len(common):]
933 rest = self.install_dir[len(common):]
927 uplevel = len([n for n in os.path.split(rest) if n])
934 uplevel = len([n for n in os.path.split(rest) if n])
928
935
929 libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]
936 libdir = uplevel * ('..' + os.sep) + self.install_lib[len(common):]
930
937
931 for outfile in self.outfiles:
938 for outfile in self.outfiles:
932 with open(outfile, 'rb') as fp:
939 with open(outfile, 'rb') as fp:
933 data = fp.read()
940 data = fp.read()
934
941
935 # skip binary files
942 # skip binary files
936 if b'\0' in data:
943 if b'\0' in data:
937 continue
944 continue
938
945
939 # During local installs, the shebang will be rewritten to the final
946 # During local installs, the shebang will be rewritten to the final
940 # install path. During wheel packaging, the shebang has a special
947 # install path. During wheel packaging, the shebang has a special
941 # value.
948 # value.
942 if data.startswith(b'#!python'):
949 if data.startswith(b'#!python'):
943 log.info('not rewriting @LIBDIR@ in %s because install path '
950 log.info('not rewriting @LIBDIR@ in %s because install path '
944 'not known' % outfile)
951 'not known' % outfile)
945 continue
952 continue
946
953
947 data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
954 data = data.replace(b'@LIBDIR@', libdir.encode(libdir_escape))
948 with open(outfile, 'wb') as fp:
955 with open(outfile, 'wb') as fp:
949 fp.write(data)
956 fp.write(data)
950
957
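For a concrete feel of the @LIBDIR@ computation above, a worked example (the paths are hypothetical) of the relative-path branch: with scripts in /usr/local/bin and modules in /usr/local/lib/python2.7/site-packages, the scripts end up pointing one level up.

    import os

    install_dir = '/usr/local/bin/'                            # hypothetical
    install_lib = '/usr/local/lib/python2.7/site-packages/'    # hypothetical

    common = os.path.commonprefix((install_dir, install_lib))  # '/usr/local/'
    rest = install_dir[len(common):]                           # 'bin/'
    uplevel = len([n for n in os.path.split(rest) if n])       # 1
    libdir = uplevel * ('..' + os.sep) + install_lib[len(common):]
    print(libdir)   # ../lib/python2.7/site-packages/ on a POSIX system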
951 # virtualenv installs custom distutils/__init__.py and
958 # virtualenv installs custom distutils/__init__.py and
952 # distutils/distutils.cfg files which essentially proxy back to the
959 # distutils/distutils.cfg files which essentially proxy back to the
953 # "real" distutils in the main Python install. The presence of this
960 # "real" distutils in the main Python install. The presence of this
954 # directory causes py2exe to pick up the "hacked" distutils package
961 # directory causes py2exe to pick up the "hacked" distutils package
955 # from the virtualenv and "import distutils" will fail from the py2exe
962 # from the virtualenv and "import distutils" will fail from the py2exe
956 # build because the "real" distutils files can't be located.
963 # build because the "real" distutils files can't be located.
957 #
964 #
958 # We work around this by monkeypatching the py2exe code finding Python
965 # We work around this by monkeypatching the py2exe code finding Python
959 # modules to replace the found virtualenv distutils modules with the
966 # modules to replace the found virtualenv distutils modules with the
960 # original versions via filesystem scanning. This is a bit hacky. But
967 # original versions via filesystem scanning. This is a bit hacky. But
961 # it allows us to use virtualenvs for py2exe packaging, which is more
968 # it allows us to use virtualenvs for py2exe packaging, which is more
962 # deterministic and reproducible.
969 # deterministic and reproducible.
963 #
970 #
964 # It's worth noting that the common StackOverflow suggestions for this
971 # It's worth noting that the common StackOverflow suggestions for this
965 # problem involve copying the original distutils files into the
972 # problem involve copying the original distutils files into the
966 # virtualenv or into the staging directory after setup() is invoked.
973 # virtualenv or into the staging directory after setup() is invoked.
967 # The former is very brittle and can easily break setup(). Our hacking
974 # The former is very brittle and can easily break setup(). Our hacking
968 # of the found modules routine has a similar effect to copying the files
975 # of the found modules routine has a similar effect to copying the files
969 # manually. But it makes fewer assumptions about how py2exe works and
976 # manually. But it makes fewer assumptions about how py2exe works and
970 # is less brittle.
977 # is less brittle.
971
978
972 # This only catches virtualenvs made with virtualenv (as opposed to
979 # This only catches virtualenvs made with virtualenv (as opposed to
973 # venv, which is likely what Python 3 uses).
980 # venv, which is likely what Python 3 uses).
974 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
981 py2exehacked = py2exeloaded and getattr(sys, 'real_prefix', None) is not None
975
982
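A short sketch (illustrative only) of the detection used just above: virtualenv sets sys.real_prefix in the interpreters it creates, while the standard venv module instead makes sys.prefix differ from sys.base_prefix, which is why this check only catches virtualenv-style environments.

    import sys

    made_by_virtualenv = getattr(sys, 'real_prefix', None) is not None
    made_by_venv = getattr(sys, 'base_prefix', sys.prefix) != sys.prefix
    print('virtualenv: %s, venv: %s' % (made_by_virtualenv, made_by_venv))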
976 if py2exehacked:
983 if py2exehacked:
977 from distutils.command.py2exe import py2exe as buildpy2exe
984 from distutils.command.py2exe import py2exe as buildpy2exe
978 from py2exe.mf import Module as py2exemodule
985 from py2exe.mf import Module as py2exemodule
979
986
980 class hgbuildpy2exe(buildpy2exe):
987 class hgbuildpy2exe(buildpy2exe):
981 def find_needed_modules(self, mf, files, modules):
988 def find_needed_modules(self, mf, files, modules):
982 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
989 res = buildpy2exe.find_needed_modules(self, mf, files, modules)
983
990
984 # Replace virtualenv's distutils modules with the real ones.
991 # Replace virtualenv's distutils modules with the real ones.
985 modules = {}
992 modules = {}
986 for k, v in res.modules.items():
993 for k, v in res.modules.items():
987 if k != 'distutils' and not k.startswith('distutils.'):
994 if k != 'distutils' and not k.startswith('distutils.'):
988 modules[k] = v
995 modules[k] = v
989
996
990 res.modules = modules
997 res.modules = modules
991
998
992 import opcode
999 import opcode
993 distutilsreal = os.path.join(os.path.dirname(opcode.__file__),
1000 distutilsreal = os.path.join(os.path.dirname(opcode.__file__),
994 'distutils')
1001 'distutils')
995
1002
996 for root, dirs, files in os.walk(distutilsreal):
1003 for root, dirs, files in os.walk(distutilsreal):
997 for f in sorted(files):
1004 for f in sorted(files):
998 if not f.endswith('.py'):
1005 if not f.endswith('.py'):
999 continue
1006 continue
1000
1007
1001 full = os.path.join(root, f)
1008 full = os.path.join(root, f)
1002
1009
1003 parents = ['distutils']
1010 parents = ['distutils']
1004
1011
1005 if root != distutilsreal:
1012 if root != distutilsreal:
1006 rel = os.path.relpath(root, distutilsreal)
1013 rel = os.path.relpath(root, distutilsreal)
1007 parents.extend(p for p in rel.split(os.sep))
1014 parents.extend(p for p in rel.split(os.sep))
1008
1015
1009 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1016 modname = '%s.%s' % ('.'.join(parents), f[:-3])
1010
1017
1011 if modname.startswith('distutils.tests.'):
1018 if modname.startswith('distutils.tests.'):
1012 continue
1019 continue
1013
1020
1014 if modname.endswith('.__init__'):
1021 if modname.endswith('.__init__'):
1015 modname = modname[:-len('.__init__')]
1022 modname = modname[:-len('.__init__')]
1016 path = os.path.dirname(full)
1023 path = os.path.dirname(full)
1017 else:
1024 else:
1018 path = None
1025 path = None
1019
1026
1020 res.modules[modname] = py2exemodule(modname, full,
1027 res.modules[modname] = py2exemodule(modname, full,
1021 path=path)
1028 path=path)
1022
1029
1023 if 'distutils' not in res.modules:
1030 if 'distutils' not in res.modules:
1024 raise SystemExit('could not find distutils modules')
1031 raise SystemExit('could not find distutils modules')
1025
1032
1026 return res
1033 return res
1027
1034
1028 cmdclass = {'build': hgbuild,
1035 cmdclass = {'build': hgbuild,
1029 'build_doc': hgbuilddoc,
1036 'build_doc': hgbuilddoc,
1030 'build_mo': hgbuildmo,
1037 'build_mo': hgbuildmo,
1031 'build_ext': hgbuildext,
1038 'build_ext': hgbuildext,
1032 'build_py': hgbuildpy,
1039 'build_py': hgbuildpy,
1033 'build_scripts': hgbuildscripts,
1040 'build_scripts': hgbuildscripts,
1034 'build_hgextindex': buildhgextindex,
1041 'build_hgextindex': buildhgextindex,
1035 'install': hginstall,
1042 'install': hginstall,
1036 'install_lib': hginstalllib,
1043 'install_lib': hginstalllib,
1037 'install_scripts': hginstallscripts,
1044 'install_scripts': hginstallscripts,
1038 'build_hgexe': buildhgexe,
1045 'build_hgexe': buildhgexe,
1039 }
1046 }
1040
1047
1041 if py2exehacked:
1048 if py2exehacked:
1042 cmdclass['py2exe'] = hgbuildpy2exe
1049 cmdclass['py2exe'] = hgbuildpy2exe
1043
1050
1044 packages = ['mercurial',
1051 packages = ['mercurial',
1045 'mercurial.cext',
1052 'mercurial.cext',
1046 'mercurial.cffi',
1053 'mercurial.cffi',
1047 'mercurial.hgweb',
1054 'mercurial.hgweb',
1048 'mercurial.pure',
1055 'mercurial.pure',
1049 'mercurial.thirdparty',
1056 'mercurial.thirdparty',
1050 'mercurial.thirdparty.attr',
1057 'mercurial.thirdparty.attr',
1051 'mercurial.thirdparty.zope',
1058 'mercurial.thirdparty.zope',
1052 'mercurial.thirdparty.zope.interface',
1059 'mercurial.thirdparty.zope.interface',
1053 'mercurial.utils',
1060 'mercurial.utils',
1054 'mercurial.revlogutils',
1061 'mercurial.revlogutils',
1055 'mercurial.testing',
1062 'mercurial.testing',
1056 'hgext', 'hgext.convert', 'hgext.fsmonitor',
1063 'hgext', 'hgext.convert', 'hgext.fsmonitor',
1057 'hgext.fastannotate',
1064 'hgext.fastannotate',
1058 'hgext.fsmonitor.pywatchman',
1065 'hgext.fsmonitor.pywatchman',
1059 'hgext.infinitepush',
1066 'hgext.infinitepush',
1060 'hgext.highlight',
1067 'hgext.highlight',
1061 'hgext.largefiles', 'hgext.lfs', 'hgext.narrow',
1068 'hgext.largefiles', 'hgext.lfs', 'hgext.narrow',
1062 'hgext.remotefilelog',
1069 'hgext.remotefilelog',
1063 'hgext.zeroconf', 'hgext3rd',
1070 'hgext.zeroconf', 'hgext3rd',
1064 'hgdemandimport']
1071 'hgdemandimport']
1065 if sys.version_info[0] == 2:
1072 if sys.version_info[0] == 2:
1066 packages.extend(['mercurial.thirdparty.concurrent',
1073 packages.extend(['mercurial.thirdparty.concurrent',
1067 'mercurial.thirdparty.concurrent.futures'])
1074 'mercurial.thirdparty.concurrent.futures'])
1068
1075
1069 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1076 if 'HG_PY2EXE_EXTRA_INSTALL_PACKAGES' in os.environ:
1070 # py2exe can't cope with namespace packages very well, so we have to
1077 # py2exe can't cope with namespace packages very well, so we have to
1071 # install any hgext3rd.* extensions that we want in the final py2exe
1078 # install any hgext3rd.* extensions that we want in the final py2exe
1072 # image here. This is gross, but you gotta do what you gotta do.
1079 # image here. This is gross, but you gotta do what you gotta do.
1073 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1080 packages.extend(os.environ['HG_PY2EXE_EXTRA_INSTALL_PACKAGES'].split(' '))
1074
1081
1075 common_depends = ['mercurial/bitmanipulation.h',
1082 common_depends = ['mercurial/bitmanipulation.h',
1076 'mercurial/compat.h',
1083 'mercurial/compat.h',
1077 'mercurial/cext/util.h']
1084 'mercurial/cext/util.h']
1078 common_include_dirs = ['mercurial']
1085 common_include_dirs = ['mercurial']
1079
1086
1080 osutil_cflags = []
1087 osutil_cflags = []
1081 osutil_ldflags = []
1088 osutil_ldflags = []
1082
1089
1083 # platform specific macros
1090 # platform specific macros
1084 for plat, func in [('bsd', 'setproctitle')]:
1091 for plat, func in [('bsd', 'setproctitle')]:
1085 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1092 if re.search(plat, sys.platform) and hasfunction(new_compiler(), func):
1086 osutil_cflags.append('-DHAVE_%s' % func.upper())
1093 osutil_cflags.append('-DHAVE_%s' % func.upper())
1087
1094
1088 for plat, macro, code in [
1095 for plat, macro, code in [
1089 ('bsd|darwin', 'BSD_STATFS', '''
1096 ('bsd|darwin', 'BSD_STATFS', '''
1090 #include <sys/param.h>
1097 #include <sys/param.h>
1091 #include <sys/mount.h>
1098 #include <sys/mount.h>
1092 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1099 int main() { struct statfs s; return sizeof(s.f_fstypename); }
1093 '''),
1100 '''),
1094 ('linux', 'LINUX_STATFS', '''
1101 ('linux', 'LINUX_STATFS', '''
1095 #include <linux/magic.h>
1102 #include <linux/magic.h>
1096 #include <sys/vfs.h>
1103 #include <sys/vfs.h>
1097 int main() { struct statfs s; return sizeof(s.f_type); }
1104 int main() { struct statfs s; return sizeof(s.f_type); }
1098 '''),
1105 '''),
1099 ]:
1106 ]:
1100 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1107 if re.search(plat, sys.platform) and cancompile(new_compiler(), code):
1101 osutil_cflags.append('-DHAVE_%s' % macro)
1108 osutil_cflags.append('-DHAVE_%s' % macro)
1102
1109
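cancompile() and hasfunction() are defined earlier in setup.py; as a rough sketch of the kind of probe they perform (an assumption about their internals, not a copy of them), one can try to compile the snippet with a scratch distutils compiler and treat a CompileError as "feature absent":

    import os
    import shutil
    import tempfile
    from distutils.ccompiler import new_compiler
    from distutils.errors import CompileError

    def probe(code):
        # write the snippet to a scratch directory and try to compile it
        tmpdir = tempfile.mkdtemp(prefix='hg-probe-')
        try:
            src = os.path.join(tmpdir, 'probe.c')
            with open(src, 'w') as f:
                f.write(code)
            try:
                new_compiler().compile([src], output_dir=tmpdir)
                return True
            except CompileError:
                return False
        finally:
            shutil.rmtree(tmpdir)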
1103 if sys.platform == 'darwin':
1110 if sys.platform == 'darwin':
1104 osutil_ldflags += ['-framework', 'ApplicationServices']
1111 osutil_ldflags += ['-framework', 'ApplicationServices']
1105
1112
1106 xdiff_srcs = [
1113 xdiff_srcs = [
1107 'mercurial/thirdparty/xdiff/xdiffi.c',
1114 'mercurial/thirdparty/xdiff/xdiffi.c',
1108 'mercurial/thirdparty/xdiff/xprepare.c',
1115 'mercurial/thirdparty/xdiff/xprepare.c',
1109 'mercurial/thirdparty/xdiff/xutils.c',
1116 'mercurial/thirdparty/xdiff/xutils.c',
1110 ]
1117 ]
1111
1118
1112 xdiff_headers = [
1119 xdiff_headers = [
1113 'mercurial/thirdparty/xdiff/xdiff.h',
1120 'mercurial/thirdparty/xdiff/xdiff.h',
1114 'mercurial/thirdparty/xdiff/xdiffi.h',
1121 'mercurial/thirdparty/xdiff/xdiffi.h',
1115 'mercurial/thirdparty/xdiff/xinclude.h',
1122 'mercurial/thirdparty/xdiff/xinclude.h',
1116 'mercurial/thirdparty/xdiff/xmacros.h',
1123 'mercurial/thirdparty/xdiff/xmacros.h',
1117 'mercurial/thirdparty/xdiff/xprepare.h',
1124 'mercurial/thirdparty/xdiff/xprepare.h',
1118 'mercurial/thirdparty/xdiff/xtypes.h',
1125 'mercurial/thirdparty/xdiff/xtypes.h',
1119 'mercurial/thirdparty/xdiff/xutils.h',
1126 'mercurial/thirdparty/xdiff/xutils.h',
1120 ]
1127 ]
1121
1128
1122 class RustCompilationError(CCompilerError):
1129 class RustCompilationError(CCompilerError):
1123 """Exception class for Rust compilation errors."""
1130 """Exception class for Rust compilation errors."""
1124
1131
1125 class RustExtension(Extension):
1132 class RustExtension(Extension):
1126 """Base classes for concrete Rust Extension classes.
1133 """Base classes for concrete Rust Extension classes.
1127 """
1134 """
1128
1135
1129 rusttargetdir = os.path.join('rust', 'target', 'release')
1136 rusttargetdir = os.path.join('rust', 'target', 'release')
1130
1137
1131 def __init__(self, mpath, sources, rustlibname, subcrate,
1138 def __init__(self, mpath, sources, rustlibname, subcrate,
1132 py3_features=None, **kw):
1139 py3_features=None, **kw):
1133 Extension.__init__(self, mpath, sources, **kw)
1140 Extension.__init__(self, mpath, sources, **kw)
1134 if hgrustext is None:
1141 if hgrustext is None:
1135 return
1142 return
1136 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1143 srcdir = self.rustsrcdir = os.path.join('rust', subcrate)
1137 self.py3_features = py3_features
1144 self.py3_features = py3_features
1138
1145
1139 # adding Rust source and control files to depends so that the extension
1146 # adding Rust source and control files to depends so that the extension
1140 # gets rebuilt if they've changed
1147 # gets rebuilt if they've changed
1141 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1148 self.depends.append(os.path.join(srcdir, 'Cargo.toml'))
1142 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1149 cargo_lock = os.path.join(srcdir, 'Cargo.lock')
1143 if os.path.exists(cargo_lock):
1150 if os.path.exists(cargo_lock):
1144 self.depends.append(cargo_lock)
1151 self.depends.append(cargo_lock)
1145 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1152 for dirpath, subdir, fnames in os.walk(os.path.join(srcdir, 'src')):
1146 self.depends.extend(os.path.join(dirpath, fname)
1153 self.depends.extend(os.path.join(dirpath, fname)
1147 for fname in fnames
1154 for fname in fnames
1148 if os.path.splitext(fname)[1] == '.rs')
1155 if os.path.splitext(fname)[1] == '.rs')
1149
1156
1150 def rustbuild(self):
1157 def rustbuild(self):
1151 if hgrustext is None:
1158 if hgrustext is None:
1152 return
1159 return
1153 env = os.environ.copy()
1160 env = os.environ.copy()
1154 if 'HGTEST_RESTOREENV' in env:
1161 if 'HGTEST_RESTOREENV' in env:
1155 # Mercurial tests change HOME to a temporary directory,
1162 # Mercurial tests change HOME to a temporary directory,
1156 # but, if installed with rustup, the Rust toolchain needs
1163 # but, if installed with rustup, the Rust toolchain needs
1157 # HOME to be correct (otherwise the 'no default toolchain'
1164 # HOME to be correct (otherwise the 'no default toolchain'
1158 # error message is issued and the build fails).
1165 # error message is issued and the build fails).
1159 # This happens currently with test-hghave.t, which does
1166 # This happens currently with test-hghave.t, which does
1160 # invoke this build.
1167 # invoke this build.
1161
1168
1162 # Unix only fix (os.path.expanduser not really reliable if
1169 # Unix only fix (os.path.expanduser not really reliable if
1163 # HOME is shadowed like this)
1170 # HOME is shadowed like this)
1164 import pwd
1171 import pwd
1165 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1172 env['HOME'] = pwd.getpwuid(os.getuid()).pw_dir
1166
1173
1167 cargocmd = ['cargo', 'build', '-vv', '--release']
1174 cargocmd = ['cargo', 'build', '-vv', '--release']
1168 if sys.version_info[0] == 3 and self.py3_features is not None:
1175 if sys.version_info[0] == 3 and self.py3_features is not None:
1169 cargocmd.extend(('--features', self.py3_features,
1176 cargocmd.extend(('--features', self.py3_features,
1170 '--no-default-features'))
1177 '--no-default-features'))
1171 try:
1178 try:
1172 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1179 subprocess.check_call(cargocmd, env=env, cwd=self.rustsrcdir)
1173 except OSError as exc:
1180 except OSError as exc:
1174 if exc.errno == errno.ENOENT:
1181 if exc.errno == errno.ENOENT:
1175 raise RustCompilationError("Cargo not found")
1182 raise RustCompilationError("Cargo not found")
1176 elif exc.errno == errno.EACCES:
1183 elif exc.errno == errno.EACCES:
1177 raise RustCompilationError(
1184 raise RustCompilationError(
1178 "Cargo found, but permisssion to execute it is denied")
1185 "Cargo found, but permisssion to execute it is denied")
1179 else:
1186 else:
1180 raise
1187 raise
1181 except subprocess.CalledProcessError:
1188 except subprocess.CalledProcessError:
1182 raise RustCompilationError(
1189 raise RustCompilationError(
1183 "Cargo failed. Working directory: %r, "
1190 "Cargo failed. Working directory: %r, "
1184 "command: %r, environment: %r"
1191 "command: %r, environment: %r"
1185 % (self.rustsrcdir, cargocmd, env))
1192 % (self.rustsrcdir, cargocmd, env))
1186
1193
1187 class RustEnhancedExtension(RustExtension):
1194 class RustEnhancedExtension(RustExtension):
1188 """A C Extension, conditionally enhanced with Rust code.
1195 """A C Extension, conditionally enhanced with Rust code.
1189
1196
1190 If the HGRUSTEXT environment variable is set to something other
1197 If the HGRUSTEXT environment variable is set to something other
1191 than 'cpython', the Rust sources get compiled and linked within the
1198 than 'cpython', the Rust sources get compiled and linked within the
1192 C target shared library object.
1199 C target shared library object.
1193 """
1200 """
1194
1201
1195 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1202 def __init__(self, mpath, sources, rustlibname, subcrate, **kw):
1196 RustExtension.__init__(self, mpath, sources, rustlibname, subcrate,
1203 RustExtension.__init__(self, mpath, sources, rustlibname, subcrate,
1197 **kw)
1204 **kw)
1198 if hgrustext != 'direct-ffi':
1205 if hgrustext != 'direct-ffi':
1199 return
1206 return
1200 self.extra_compile_args.append('-DWITH_RUST')
1207 self.extra_compile_args.append('-DWITH_RUST')
1201 self.libraries.append(rustlibname)
1208 self.libraries.append(rustlibname)
1202 self.library_dirs.append(self.rusttargetdir)
1209 self.library_dirs.append(self.rusttargetdir)
1203
1210
1204 class RustStandaloneExtension(RustExtension):
1211 class RustStandaloneExtension(RustExtension):
1205
1212
1206 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1213 def __init__(self, pydottedname, rustcrate, dylibname, **kw):
1207 RustExtension.__init__(self, pydottedname, [], dylibname, rustcrate,
1214 RustExtension.__init__(self, pydottedname, [], dylibname, rustcrate,
1208 **kw)
1215 **kw)
1209 self.dylibname = dylibname
1216 self.dylibname = dylibname
1210
1217
1211 def build(self, target_dir):
1218 def build(self, target_dir):
1212 self.rustbuild()
1219 self.rustbuild()
1213 target = [target_dir]
1220 target = [target_dir]
1214 target.extend(self.name.split('.'))
1221 target.extend(self.name.split('.'))
1215 ext = '.so' # TODO Unix only
1222 ext = '.so' # TODO Unix only
1216 target[-1] += ext
1223 target[-1] += ext
1217 shutil.copy2(os.path.join(self.rusttargetdir, self.dylibname + ext),
1224 shutil.copy2(os.path.join(self.rusttargetdir, self.dylibname + ext),
1218 os.path.join(*target))
1225 os.path.join(*target))
1219
1226
1220
1227
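To make the copy step above concrete, a worked example (the build directory name is hypothetical; the other names come from this file) of what RustStandaloneExtension.build() computes for the 'mercurial.rustext' extension registered below, built from the librusthg dylib:

    import os

    target_dir = 'build/lib.linux-x86_64-2.7'       # hypothetical build dir
    name = 'mercurial.rustext'
    dylibname = 'librusthg'
    rusttargetdir = os.path.join('rust', 'target', 'release')

    target = [target_dir] + name.split('.')
    target[-1] += '.so'                             # Unix only, as noted above
    src = os.path.join(rusttargetdir, dylibname + '.so')
    dst = os.path.join(*target)
    print(src)   # rust/target/release/librusthg.so
    print(dst)   # build/lib.linux-x86_64-2.7/mercurial/rustext.so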
1221 extmodules = [
1228 extmodules = [
1222 Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
1229 Extension('mercurial.cext.base85', ['mercurial/cext/base85.c'],
1223 include_dirs=common_include_dirs,
1230 include_dirs=common_include_dirs,
1224 depends=common_depends),
1231 depends=common_depends),
1225 Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
1232 Extension('mercurial.cext.bdiff', ['mercurial/bdiff.c',
1226 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1233 'mercurial/cext/bdiff.c'] + xdiff_srcs,
1227 include_dirs=common_include_dirs,
1234 include_dirs=common_include_dirs,
1228 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers),
1235 depends=common_depends + ['mercurial/bdiff.h'] + xdiff_headers),
1229 Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
1236 Extension('mercurial.cext.mpatch', ['mercurial/mpatch.c',
1230 'mercurial/cext/mpatch.c'],
1237 'mercurial/cext/mpatch.c'],
1231 include_dirs=common_include_dirs,
1238 include_dirs=common_include_dirs,
1232 depends=common_depends),
1239 depends=common_depends),
1233 RustEnhancedExtension(
1240 RustEnhancedExtension(
1234 'mercurial.cext.parsers', ['mercurial/cext/charencode.c',
1241 'mercurial.cext.parsers', ['mercurial/cext/charencode.c',
1235 'mercurial/cext/dirs.c',
1242 'mercurial/cext/dirs.c',
1236 'mercurial/cext/manifest.c',
1243 'mercurial/cext/manifest.c',
1237 'mercurial/cext/parsers.c',
1244 'mercurial/cext/parsers.c',
1238 'mercurial/cext/pathencode.c',
1245 'mercurial/cext/pathencode.c',
1239 'mercurial/cext/revlog.c'],
1246 'mercurial/cext/revlog.c'],
1240 'hgdirectffi',
1247 'hgdirectffi',
1241 'hg-direct-ffi',
1248 'hg-direct-ffi',
1242 include_dirs=common_include_dirs,
1249 include_dirs=common_include_dirs,
1243 depends=common_depends + ['mercurial/cext/charencode.h',
1250 depends=common_depends + ['mercurial/cext/charencode.h',
1244 'mercurial/cext/revlog.h',
1251 'mercurial/cext/revlog.h',
1245 'rust/hg-core/src/ancestors.rs',
1252 'rust/hg-core/src/ancestors.rs',
1246 'rust/hg-core/src/lib.rs']),
1253 'rust/hg-core/src/lib.rs']),
1247 Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
1254 Extension('mercurial.cext.osutil', ['mercurial/cext/osutil.c'],
1248 include_dirs=common_include_dirs,
1255 include_dirs=common_include_dirs,
1249 extra_compile_args=osutil_cflags,
1256 extra_compile_args=osutil_cflags,
1250 extra_link_args=osutil_ldflags,
1257 extra_link_args=osutil_ldflags,
1251 depends=common_depends),
1258 depends=common_depends),
1252 Extension(
1259 Extension(
1253 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations', [
1260 'mercurial.thirdparty.zope.interface._zope_interface_coptimizations', [
1254 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1261 'mercurial/thirdparty/zope/interface/_zope_interface_coptimizations.c',
1255 ]),
1262 ]),
1256 Extension('hgext.fsmonitor.pywatchman.bser',
1263 Extension('hgext.fsmonitor.pywatchman.bser',
1257 ['hgext/fsmonitor/pywatchman/bser.c']),
1264 ['hgext/fsmonitor/pywatchman/bser.c']),
1258 ]
1265 ]
1259
1266
1260 if hgrustext == 'cpython':
1267 if hgrustext == 'cpython':
1261 extmodules.append(
1268 extmodules.append(
1262 RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
1269 RustStandaloneExtension('mercurial.rustext', 'hg-cpython', 'librusthg',
1263 py3_features='python3')
1270 py3_features='python3')
1264 )
1271 )
1265
1272
1266
1273
1267 sys.path.insert(0, 'contrib/python-zstandard')
1274 sys.path.insert(0, 'contrib/python-zstandard')
1268 import setup_zstd
1275 import setup_zstd
1269 extmodules.append(setup_zstd.get_c_extension(
1276 extmodules.append(setup_zstd.get_c_extension(
1270 name='mercurial.zstd',
1277 name='mercurial.zstd',
1271 root=os.path.abspath(os.path.dirname(__file__))))
1278 root=os.path.abspath(os.path.dirname(__file__))))
1272
1279
1273 try:
1280 try:
1274 from distutils import cygwinccompiler
1281 from distutils import cygwinccompiler
1275
1282
1276 # the -mno-cygwin option has been deprecated for years
1283 # the -mno-cygwin option has been deprecated for years
1277 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1284 mingw32compilerclass = cygwinccompiler.Mingw32CCompiler
1278
1285
1279 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1286 class HackedMingw32CCompiler(cygwinccompiler.Mingw32CCompiler):
1280 def __init__(self, *args, **kwargs):
1287 def __init__(self, *args, **kwargs):
1281 mingw32compilerclass.__init__(self, *args, **kwargs)
1288 mingw32compilerclass.__init__(self, *args, **kwargs)
1282 for i in 'compiler compiler_so linker_exe linker_so'.split():
1289 for i in 'compiler compiler_so linker_exe linker_so'.split():
1283 try:
1290 try:
1284 getattr(self, i).remove('-mno-cygwin')
1291 getattr(self, i).remove('-mno-cygwin')
1285 except ValueError:
1292 except ValueError:
1286 pass
1293 pass
1287
1294
1288 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1295 cygwinccompiler.Mingw32CCompiler = HackedMingw32CCompiler
1289 except ImportError:
1296 except ImportError:
1290 # the cygwinccompiler package is not available on some Python
1297 # the cygwinccompiler package is not available on some Python
1291 # distributions like the ones from the optware project for Synology
1298 # distributions like the ones from the optware project for Synology
1292 # DiskStation boxes
1299 # DiskStation boxes
1293 class HackedMingw32CCompiler(object):
1300 class HackedMingw32CCompiler(object):
1294 pass
1301 pass
1295
1302
1296 if os.name == 'nt':
1303 if os.name == 'nt':
1297 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1304 # Allow compiler/linker flags to be added to Visual Studio builds. Passing
1298 # extra_link_args to distutils.extensions.Extension() doesn't have any
1305 # extra_link_args to distutils.extensions.Extension() doesn't have any
1299 # effect.
1306 # effect.
1300 from distutils import msvccompiler
1307 from distutils import msvccompiler
1301
1308
1302 msvccompilerclass = msvccompiler.MSVCCompiler
1309 msvccompilerclass = msvccompiler.MSVCCompiler
1303
1310
1304 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1311 class HackedMSVCCompiler(msvccompiler.MSVCCompiler):
1305 def initialize(self):
1312 def initialize(self):
1306 msvccompilerclass.initialize(self)
1313 msvccompilerclass.initialize(self)
1307 # "warning LNK4197: export 'func' specified multiple times"
1314 # "warning LNK4197: export 'func' specified multiple times"
1308 self.ldflags_shared.append('/ignore:4197')
1315 self.ldflags_shared.append('/ignore:4197')
1309 self.ldflags_shared_debug.append('/ignore:4197')
1316 self.ldflags_shared_debug.append('/ignore:4197')
1310
1317
1311 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1318 msvccompiler.MSVCCompiler = HackedMSVCCompiler
1312
1319
1313 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
1320 packagedata = {'mercurial': ['locale/*/LC_MESSAGES/hg.mo',
1314 'help/*.txt',
1321 'help/*.txt',
1315 'help/internals/*.txt',
1322 'help/internals/*.txt',
1316 'default.d/*.rc',
1323 'default.d/*.rc',
1317 'dummycert.pem']}
1324 'dummycert.pem']}
1318
1325
1319 def ordinarypath(p):
1326 def ordinarypath(p):
1320 return p and p[0] != '.' and p[-1] != '~'
1327 return p and p[0] != '.' and p[-1] != '~'
1321
1328
1322 for root in ('templates',):
1329 for root in ('templates',):
1323 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1330 for curdir, dirs, files in os.walk(os.path.join('mercurial', root)):
1324 curdir = curdir.split(os.sep, 1)[1]
1331 curdir = curdir.split(os.sep, 1)[1]
1325 dirs[:] = filter(ordinarypath, dirs)
1332 dirs[:] = filter(ordinarypath, dirs)
1326 for f in filter(ordinarypath, files):
1333 for f in filter(ordinarypath, files):
1327 f = os.path.join(curdir, f)
1334 f = os.path.join(curdir, f)
1328 packagedata['mercurial'].append(f)
1335 packagedata['mercurial'].append(f)
1329
1336
1330 datafiles = []
1337 datafiles = []
1331
1338
1332 # distutils expects version to be str/unicode. Converting it to
1339 # distutils expects version to be str/unicode. Converting it to
1333 # unicode on Python 2 still works because it won't contain any
1340 # unicode on Python 2 still works because it won't contain any
1334 # non-ascii bytes and will be implicitly converted back to bytes
1341 # non-ascii bytes and will be implicitly converted back to bytes
1335 # when operated on.
1342 # when operated on.
1336 assert isinstance(version, bytes)
1343 assert isinstance(version, bytes)
1337 setupversion = version.decode('ascii')
1344 setupversion = version.decode('ascii')
1338
1345
1339 extra = {}
1346 extra = {}
1340
1347
1341 py2exepackages = [
1348 py2exepackages = [
1342 'hgdemandimport',
1349 'hgdemandimport',
1343 'hgext3rd',
1350 'hgext3rd',
1344 'hgext',
1351 'hgext',
1345 'email',
1352 'email',
1346 # implicitly imported per module policy
1353 # implicitly imported per module policy
1347 # (cffi wouldn't be used as a frozen exe)
1354 # (cffi wouldn't be used as a frozen exe)
1348 'mercurial.cext',
1355 'mercurial.cext',
1349 #'mercurial.cffi',
1356 #'mercurial.cffi',
1350 'mercurial.pure',
1357 'mercurial.pure',
1351 ]
1358 ]
1352
1359
1353 py2exeexcludes = []
1360 py2exeexcludes = []
1354 py2exedllexcludes = ['crypt32.dll']
1361 py2exedllexcludes = ['crypt32.dll']
1355
1362
1356 if issetuptools:
1363 if issetuptools:
1357 extra['python_requires'] = supportedpy
1364 extra['python_requires'] = supportedpy
1358
1365
1359 if py2exeloaded:
1366 if py2exeloaded:
1360 extra['console'] = [
1367 extra['console'] = [
1361 {'script':'hg',
1368 {'script':'hg',
1362 'copyright':'Copyright (C) 2005-2019 Matt Mackall and others',
1369 'copyright':'Copyright (C) 2005-2019 Matt Mackall and others',
1363 'product_version':version}]
1370 'product_version':version}]
1364 # Sub command of 'build' because 'py2exe' does not handle sub_commands.
1371 # Sub command of 'build' because 'py2exe' does not handle sub_commands.
1365 # Need to override hgbuild because it has a private copy of
1372 # Need to override hgbuild because it has a private copy of
1366 # build.sub_commands.
1373 # build.sub_commands.
1367 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1374 hgbuild.sub_commands.insert(0, ('build_hgextindex', None))
1368 # put dlls in sub directory so that they won't pollute PATH
1375 # put dlls in sub directory so that they won't pollute PATH
1369 extra['zipfile'] = 'lib/library.zip'
1376 extra['zipfile'] = 'lib/library.zip'
1370
1377
1371 # We allow some configuration to be supplemented via environment
1378 # We allow some configuration to be supplemented via environment
1372 # variables. This is better than setup.cfg files because it allows
1379 # variables. This is better than setup.cfg files because it allows
1373 # supplementing configs instead of replacing them.
1380 # supplementing configs instead of replacing them.
1374 extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
1381 extrapackages = os.environ.get('HG_PY2EXE_EXTRA_PACKAGES')
1375 if extrapackages:
1382 if extrapackages:
1376 py2exepackages.extend(extrapackages.split(' '))
1383 py2exepackages.extend(extrapackages.split(' '))
1377
1384
1378 excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
1385 excludes = os.environ.get('HG_PY2EXE_EXTRA_EXCLUDES')
1379 if excludes:
1386 if excludes:
1380 py2exeexcludes.extend(excludes.split(' '))
1387 py2exeexcludes.extend(excludes.split(' '))
1381
1388
1382 dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
1389 dllexcludes = os.environ.get('HG_PY2EXE_EXTRA_DLL_EXCLUDES')
1383 if dllexcludes:
1390 if dllexcludes:
1384 py2exedllexcludes.extend(dllexcludes.split(' '))
1391 py2exedllexcludes.extend(dllexcludes.split(' '))
1385
1392
1386 if os.name == 'nt':
1393 if os.name == 'nt':
1387 # Windows binary file versions for exe/dll files must have the
1394 # Windows binary file versions for exe/dll files must have the
1388 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
1395 # form W.X.Y.Z, where W,X,Y,Z are numbers in the range 0..65535
1389 setupversion = setupversion.split(r'+', 1)[0]
1396 setupversion = setupversion.split(r'+', 1)[0]
1390
1397
1391 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
1398 if sys.platform == 'darwin' and os.path.exists('/usr/bin/xcodebuild'):
1392 version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
1399 version = runcmd(['/usr/bin/xcodebuild', '-version'], {})[1].splitlines()
1393 if version:
1400 if version:
1394 version = version[0]
1401 version = version[0]
1395 if sys.version_info[0] == 3:
1402 if sys.version_info[0] == 3:
1396 version = version.decode('utf-8')
1403 version = version.decode('utf-8')
1397 xcode4 = (version.startswith('Xcode') and
1404 xcode4 = (version.startswith('Xcode') and
1398 StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
1405 StrictVersion(version.split()[1]) >= StrictVersion('4.0'))
1399 xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
1406 xcode51 = re.match(r'^Xcode\s+5\.1', version) is not None
1400 else:
1407 else:
1401 # xcodebuild returns empty on OS X Lion with XCode 4.3 not
1408 # xcodebuild returns empty on OS X Lion with XCode 4.3 not
1402 # installed, but instead with only command-line tools. Assume
1409 # installed, but instead with only command-line tools. Assume
1403 # that only happens on >= Lion, thus no PPC support.
1410 # that only happens on >= Lion, thus no PPC support.
1404 xcode4 = True
1411 xcode4 = True
1405 xcode51 = False
1412 xcode51 = False
1406
1413
1407 # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
1414 # XCode 4.0 dropped support for ppc architecture, which is hardcoded in
1408 # distutils.sysconfig
1415 # distutils.sysconfig
1409 if xcode4:
1416 if xcode4:
1410 os.environ['ARCHFLAGS'] = ''
1417 os.environ['ARCHFLAGS'] = ''
1411
1418
1412 # XCode 5.1 changes clang such that it now fails to compile if the
1419 # XCode 5.1 changes clang such that it now fails to compile if the
1413 # -mno-fused-madd flag is passed, but the version of Python shipped with
1420 # -mno-fused-madd flag is passed, but the version of Python shipped with
1414 # OS X 10.9 Mavericks includes this flag. This causes problems in all
1421 # OS X 10.9 Mavericks includes this flag. This causes problems in all
1415 # C extension modules, and a bug has been filed upstream at
1422 # C extension modules, and a bug has been filed upstream at
1416 # http://bugs.python.org/issue21244. We also need to patch this here
1423 # http://bugs.python.org/issue21244. We also need to patch this here
1417 # so Mercurial can continue to compile in the meantime.
1424 # so Mercurial can continue to compile in the meantime.
1418 if xcode51:
1425 if xcode51:
1419 cflags = get_config_var('CFLAGS')
1426 cflags = get_config_var('CFLAGS')
1420 if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
1427 if cflags and re.search(r'-mno-fused-madd\b', cflags) is not None:
1421 os.environ['CFLAGS'] = (
1428 os.environ['CFLAGS'] = (
1422 os.environ.get('CFLAGS', '') + ' -Qunused-arguments')
1429 os.environ.get('CFLAGS', '') + ' -Qunused-arguments')
1423
1430
1424 setup(name='mercurial',
1431 setup(name='mercurial',
1425 version=setupversion,
1432 version=setupversion,
1426 author='Matt Mackall and many others',
1433 author='Matt Mackall and many others',
1427 author_email='mercurial@mercurial-scm.org',
1434 author_email='mercurial@mercurial-scm.org',
1428 url='https://mercurial-scm.org/',
1435 url='https://mercurial-scm.org/',
1429 download_url='https://mercurial-scm.org/release/',
1436 download_url='https://mercurial-scm.org/release/',
1430 description=('Fast scalable distributed SCM (revision control, version '
1437 description=('Fast scalable distributed SCM (revision control, version '
1431 'control) system'),
1438 'control) system'),
1432 long_description=('Mercurial is a distributed SCM tool written in Python.'
1439 long_description=('Mercurial is a distributed SCM tool written in Python.'
1433 ' It is used by a number of large projects that require'
1440 ' It is used by a number of large projects that require'
1434 ' fast, reliable distributed revision control, such as '
1441 ' fast, reliable distributed revision control, such as '
1435 'Mozilla.'),
1442 'Mozilla.'),
1436 license='GNU GPLv2 or any later version',
1443 license='GNU GPLv2 or any later version',
1437 classifiers=[
1444 classifiers=[
1438 'Development Status :: 6 - Mature',
1445 'Development Status :: 6 - Mature',
1439 'Environment :: Console',
1446 'Environment :: Console',
1440 'Intended Audience :: Developers',
1447 'Intended Audience :: Developers',
1441 'Intended Audience :: System Administrators',
1448 'Intended Audience :: System Administrators',
1442 'License :: OSI Approved :: GNU General Public License (GPL)',
1449 'License :: OSI Approved :: GNU General Public License (GPL)',
1443 'Natural Language :: Danish',
1450 'Natural Language :: Danish',
1444 'Natural Language :: English',
1451 'Natural Language :: English',
1445 'Natural Language :: German',
1452 'Natural Language :: German',
1446 'Natural Language :: Italian',
1453 'Natural Language :: Italian',
1447 'Natural Language :: Japanese',
1454 'Natural Language :: Japanese',
1448 'Natural Language :: Portuguese (Brazilian)',
1455 'Natural Language :: Portuguese (Brazilian)',
1449 'Operating System :: Microsoft :: Windows',
1456 'Operating System :: Microsoft :: Windows',
1450 'Operating System :: OS Independent',
1457 'Operating System :: OS Independent',
1451 'Operating System :: POSIX',
1458 'Operating System :: POSIX',
1452 'Programming Language :: C',
1459 'Programming Language :: C',
1453 'Programming Language :: Python',
1460 'Programming Language :: Python',
1454 'Topic :: Software Development :: Version Control',
1461 'Topic :: Software Development :: Version Control',
1455 ],
1462 ],
1456 scripts=scripts,
1463 scripts=scripts,
1457 packages=packages,
1464 packages=packages,
1458 ext_modules=extmodules,
1465 ext_modules=extmodules,
1459 data_files=datafiles,
1466 data_files=datafiles,
1460 package_data=packagedata,
1467 package_data=packagedata,
1461 cmdclass=cmdclass,
1468 cmdclass=cmdclass,
1462 distclass=hgdist,
1469 distclass=hgdist,
1463 options={
1470 options={
1464 'py2exe': {
1471 'py2exe': {
1465 'bundle_files': 3,
1472 'bundle_files': 3,
1466 'dll_excludes': py2exedllexcludes,
1473 'dll_excludes': py2exedllexcludes,
1467 'excludes': py2exeexcludes,
1474 'excludes': py2exeexcludes,
1468 'packages': py2exepackages,
1475 'packages': py2exepackages,
1469 },
1476 },
1470 'bdist_mpkg': {
1477 'bdist_mpkg': {
1471 'zipdist': False,
1478 'zipdist': False,
1472 'license': 'COPYING',
1479 'license': 'COPYING',
1473 'readme': 'contrib/packaging/macosx/Readme.html',
1480 'readme': 'contrib/packaging/macosx/Readme.html',
1474 'welcome': 'contrib/packaging/macosx/Welcome.html',
1481 'welcome': 'contrib/packaging/macosx/Welcome.html',
1475 },
1482 },
1476 },
1483 },
1477 **extra)
1484 **extra)