##// END OF EJS Templates
url: support suppressing Accept header...
Gregory Szorc -
r37063:a708e1e4 default
parent child Browse files
Show More
@@ -1,3010 +1,3016 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import tempfile
24 import tempfile
25 import time
25 import time
26
26
27 from .i18n import _
27 from .i18n import _
28 from .node import (
28 from .node import (
29 bin,
29 bin,
30 hex,
30 hex,
31 nullhex,
31 nullhex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from . import (
36 from . import (
37 bundle2,
37 bundle2,
38 changegroup,
38 changegroup,
39 cmdutil,
39 cmdutil,
40 color,
40 color,
41 context,
41 context,
42 dagparser,
42 dagparser,
43 dagutil,
43 dagutil,
44 encoding,
44 encoding,
45 error,
45 error,
46 exchange,
46 exchange,
47 extensions,
47 extensions,
48 filemerge,
48 filemerge,
49 fileset,
49 fileset,
50 formatter,
50 formatter,
51 hg,
51 hg,
52 httppeer,
52 httppeer,
53 localrepo,
53 localrepo,
54 lock as lockmod,
54 lock as lockmod,
55 logcmdutil,
55 logcmdutil,
56 merge as mergemod,
56 merge as mergemod,
57 obsolete,
57 obsolete,
58 obsutil,
58 obsutil,
59 phases,
59 phases,
60 policy,
60 policy,
61 pvec,
61 pvec,
62 pycompat,
62 pycompat,
63 registrar,
63 registrar,
64 repair,
64 repair,
65 revlog,
65 revlog,
66 revset,
66 revset,
67 revsetlang,
67 revsetlang,
68 scmutil,
68 scmutil,
69 setdiscovery,
69 setdiscovery,
70 simplemerge,
70 simplemerge,
71 smartset,
71 smartset,
72 sshpeer,
72 sshpeer,
73 sslutil,
73 sslutil,
74 streamclone,
74 streamclone,
75 templater,
75 templater,
76 treediscovery,
76 treediscovery,
77 upgrade,
77 upgrade,
78 url as urlmod,
78 url as urlmod,
79 util,
79 util,
80 vfs as vfsmod,
80 vfs as vfsmod,
81 wireprotoserver,
81 wireprotoserver,
82 )
82 )
83 from .utils import dateutil
83 from .utils import dateutil
84
84
# convenience alias: release() closes any number of locks/transactions,
# ignoring None entries (used in the finally blocks below)
release = lockmod.release

# decorator that registers each debug* function in this module's command table
command = registrar.command()
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    nargs = len(args)
    if nargs == 3:
        # explicit revlog index file given: open it relative to the cwd
        index, rev1, rev2 = args
        cwdvfs = vfsmod.vfs(pycompat.getcwd(), audit=False)
        r = revlog.revlog(cwdvfs, index)
        lookup = r.lookup
    elif nargs == 2:
        # no index argument: fall back to the changelog of the current repo
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        r = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    a = r.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (r.rev(a), hex(a)))
107
107
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # open the bundle file and let exchange sniff its format; the resulting
    # bundle object knows how to apply itself to the local repository
    f = hg.openpath(ui, fname)
    gen = exchange.readbundle(ui, f, fname)
    gen.apply(repo)
114
114
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense on a pristine repository
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, for the progress bar)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    # ":tag" entries collected during the walk, written to .hg/localtags at
    # the end
    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        # at: rev number of the last node committed (-1 before the first)
        at = -1
        atbranch = 'default'
        # nodeids[i] is the node id committed for DAG id i, so backrefs
        # (ps entries) can be resolved to parent nodes
        nodeids = []
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                # 'n' event: create one changeset with parents ps
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge rev: three-way merge the "mf" file of both
                        # parents against their common ancestor
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        # very first node: start from the pre-built lines
                        ml = initialmergedlines
                    # touch this rev's own line so every rev changes the file
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # single file fully rewritten by every rev
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # one brand-new file per rev ...
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        # ... and on merges, carry over the second parent's
                        # "nf*" files so they survive the merge
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file contents prepared above
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve DAG backrefs into concrete parent node ids
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # 'l' event: record a local tag for node id
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # 'a' event: switch the named branch for subsequent nodes
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        release(tr, lock, wlock)
269
269
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """dump the contents of a changegroup

    With ``all`` set, prints every delta header field for the changelog,
    manifest and each filelog; otherwise only the changelog node ids.
    ``indent`` prefixes each output line (used when nested inside a
    bundle2 listing).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # print one section header then one line per delta in the
            # current changegroup stream position
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog sections repeat until an empty header dict is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
298
298
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    indent_string = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # markers encoded with a format we do not know: report and move on
        # rather than aborting the whole bundle listing
        msg = "%sunsupported version: %s (%d bytes)\n"
        msg %= indent_string, exc.version, len(data)
        ui.write(msg)
    else:
        msg = "%sversion: %d (%d bytes)\n"
        msg %= indent_string, version, len(data)
        ui.write(msg)
        # reuse the debugobsolete formatter so output matches that command
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(indent_string)
            cmdutil.showmarker(fm, m)
        fm.end()
321
321
def _debugphaseheads(ui, data, indent=0):
    """print the phase heads decoded from a phase-heads part payload

    One line per head, ``<hex node> <phase name>``, each prefixed by
    ``indent`` spaces.
    """
    prefix = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phasename))
330
330
def _quasirepr(thing):
    """repr()-like rendering with deterministic ordering for mappings"""
    maptypes = (dict, util.sortdict, collections.OrderedDict)
    if not isinstance(thing, maptypes):
        return pycompat.bytestr(repr(thing))
    # render mapping entries sorted by key so output is reproducible
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
336
336
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # optional filter: only show parts whose type was requested
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        # for known part types, expand their payload (indented) unless quiet
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
358
358
@command('debugbundle',
        [('a', 'all', None, _('show all details')),
         ('', 'part-type', [], _('show only the named part type')),
         ('', 'spec', None, _('print the bundlespec of the bundle'))],
        _('FILE'),
        norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundlespec string, do not list contents
            spec = exchange.getbundlespec(ui, f)
            ui.write('%s\n' % spec)
            return

        # dispatch on bundle format: bundle2 vs plain changegroup
        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
377
377
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    # bundle2 capabilities are nested: one key, possibly several values each
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for key, values in sorted(b2caps.iteritems()):
            ui.write(('  %s\n') % key)
            for v in values:
                ui.write(('    %s\n') % v)
396
396
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate

    Cross-checks every dirstate entry against the manifests of the two
    dirstate parents, warns about each inconsistency found, and aborts if
    any were seen.
    """
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    for f in repo.dirstate:
        state = repo.dirstate[f]
        # normal/removed entries must exist in the first parent's manifest
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        # added entries must NOT already exist in the first parent
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        # merged entries must come from at least one of the two parents
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    for f in m1:
        # every file in the first parent's manifest must be tracked
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # do not name this local 'error': that would shadow the imported
        # 'error' module and make error.Abort an attribute lookup on a
        # string (AttributeError instead of the intended Abort)
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
424
424
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    # --style lists configured style labels; default lists raw colors
    shower = _debugdisplaystyle if opts.get(r'style') else _debugdisplaycolor
    return shower(ui)
435
435
def _debugdisplaycolor(ui):
    """print every available color/effect name, each rendered in itself"""
    # work on a copy so we can rewrite the style table without touching
    # the caller's ui
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: also expose user-configured color./terminfo. keys
        for k, v in ui.configitems('color'):
            if k.startswith('color.'):
                ui._styles[k] = k[6:]
            elif k.startswith('terminfo.'):
                ui._styles[k] = k[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    items = sorted(ui._styles.items(),
                   key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in items:
        ui.write(('%s\n') % colorname, label=label)
453
453
def _debugdisplaystyle(ui):
    """list every configured style label with its effects, column-aligned"""
    ui.write(_('available style:\n'))
    width = max(len(s) for s in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            # pad to the longest label, then render each effect in itself
            padding = ' ' * (max(0, width - len(label)))
            rendered = ', '.join(ui.label(e, e) for e in effects.split())
            ui.write(': ')
            ui.write(padding)
            ui.write(rendered)
        ui.write('\n')
465
465
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        # the v1 stream bundle includes everything, secret changesets too
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, gen = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, gen, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
483
483
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # explicit revlog index file: walk its DAG, labeling requested revs
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # yield ('n', (rev, parents)) per node, plus 'l' label events
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # no file: walk the changelog, optionally labeling tags/branches
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # map rev -> list of tag names pointing at it
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # emit an 'a' (annotation) event when the branch changes
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
546
546
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    # When -c/-m/--dir select the revlog, the positional FILE argument is
    # actually the revision to dump, so shuffle the arguments accordingly.
    if any(opts.get(flag) for flag in ('changelog', 'manifest', 'dir')):
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    rl = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        # raw=True: emit the stored revision without flag processing.
        ui.write(rl.revision(rl.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
562
562
@command('debugdate',
    [('e', 'extended', None, _('try extended date formats'))],
    _('[-e] DATE [RANGE]'),
    norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended widens the set of accepted input formats.
    if not opts[r"extended"]:
        parsed = dateutil.parsedate(date)
    else:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # RANGE is a date spec; report whether the parsed date matches it.
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
578
578
@command('debugdeltachain',
    cmdutil.debugrevlogopts + cmdutil.formatteropts,
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``: in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    # Older revlog-like objects may not support sparse read at all.
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Per-revision stats. Index entry positions used below:
        # e[1]=compressed size, e[2]=uncompressed size, e[3]=delta base rev,
        # e[5]/e[6]=parent revisions.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # With generaldelta the delta base can be any revision, so
            # classify it relative to parents/previous/self.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta, a revision is either a full snapshot
            # (base is itself) or a delta against the previous revision.
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        # Sum compressed sizes over the whole delta chain.
        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # Plain-mode column header; the sparse-read columns are appended only
    # when the revlog actually uses sparse reading.
    fm.plain(' rev chain# chainlen prev delta '
             'size rawsize chainsize ratio lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain(' readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Chains are numbered by unique base revision, in order of appearance.
    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        # Linear on-disk distance from the chain base to the end of this
        # revision; the part not occupied by the chain itself is "extra".
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            # Chain of length 1: this revision is its own base.
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            # Simulate a sparse read of the chain to measure how much data
            # would be read, in how many hunks, and the largest hunk size.
            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            # Fraction of the bytes read that actually belong to the chain.
            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
720
720
@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    # Sort entries by (mtime, filename) with --datesort, else by filename.
    keyfunc = (lambda item: (item[1][3], item[0])) if datesort else None
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        state, mode, size, mtime = ent[0], ent[1], ent[2], ent[3]
        if mtime == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = encoding.strtolocal(
                time.strftime(r"%Y-%m-%d %H:%M:%S ", time.localtime(mtime)))
        # 0o20000 in the stored mode marks a symlink entry.
        if mode & 0o20000:
            modestr = 'lnk'
        else:
            modestr = '%3o' % (mode & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (state, modestr, size, timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
752
752
@command('debugdiscovery',
    [('', 'old', None, _('use old-style discovery')),
    ('', 'nonheads', None,
     _('use old-style discovery with non-heads included')),
    # FIX: this help string was the only one not marked for translation.
    ('', 'rev', [], _('restrict discovery to this set of revs')),
    ] + cmdutil.remoteopts,
    _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        # Run one discovery round and report common/remote/local heads.
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                # FIX: renamed from ``all``, which shadowed the builtin.
                ancs = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(ancs))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            # FIX: renamed from ``any``, which shadowed the builtin.
            common, anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
804
804
# Chunk size (4 KiB) used when streaming downloaded data.
_chunksize = 4 << 10

@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The resource is written to the ``--output`` path when given, otherwise
    to the ui.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        # Stream in fixed-size chunks to bound memory use.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # FIX: the source handle was previously never closed (resource
        # leak); close it even when the read/write loop fails, and still
        # close the output file if closing the source raises.
        try:
            fh.close()
        finally:
            if output:
                dest.close()
828
828
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = [] # never expose magic string to users
        else:
            # ``testedwith`` is a whitespace-separated list of hg versions
            # the extension declares itself tested against.
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            # Quiet/verbose: name on its own line, details (if any) follow.
            fm.write('name', '%s\n', extname)
        else:
            # Normal mode: annotate the name with a testing-status marker.
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                # Show the most recent version the extension was tested with.
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                 _(' location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_(' bundled: %s\n') % ['no', 'yes'][isinternal])
        # Always expose the flag to templated output, even in non-verbose.
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _(' tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _(' bug reporting: %s\n'), extbuglink or "")

    fm.end()
874
874
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
    _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # In verbose mode, also dump the parsed fileset expression tree.
        ui.note(fileset.prettyformat(fileset.parse(expr)), "\n")

    # Print every file of the selected revision matching the fileset.
    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
887
887
@command('debugformat',
    [] + cmdutil.formatteropts,
    _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest variant name, but at least the header label.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        # Build a '%s:' format padded so the value columns line up.
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Plain output: render booleans as yes/no, pass strings through
            # (anything with startswith is treated as a string).
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        # Templated output keeps raw values.
        formatvalue = pycompat.identity

    # Header row; config/default columns only appear in verbose mode.
    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so the UI can highlight mismatches between the repo's
        # actual format, the current config, and Mercurial's default.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
950
950
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    def yesno(value):
        # Render a filesystem probe result for display.
        return 'yes' if value else 'no'

    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % yesno(util.checkexec(path)))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % yesno(util.checklink(path)))
    ui.write(('hardlink: %s\n') % yesno(util.checknlink(path)))
    casesensitive = '(unknown)'
    try:
        # Probe with a throwaway file; can fail with OSError e.g. on a
        # read-only filesystem, in which case we report '(unknown)'.
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = yesno(util.fscasesensitive(f.name))
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
967
967
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
    ('C', 'common', [], _('id of common node'), _('ID')),
    ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")

    # Translate -H/-C hex node ids into getbundle() keyword arguments.
    kwargs = {}
    if common:
        kwargs[r'common'] = [bin(s) for s in common]
    if head:
        kwargs[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    kwargs[r'bundlecaps'] = None
    cg = repo.getbundle('debug', **kwargs)

    # Map the user-facing --type name onto an on-disk bundle format id.
    typemap = {'none': 'HG10UN',
               'bzip2': 'HG10BZ',
               'gzip': 'HG10GZ',
               'bundle2': 'HG20'}
    bundletype = typemap.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, cg, bundlepath, bundletype)
1002
1002
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # No file arguments: dump the combined ignore matcher itself.
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                # The path itself matches an ignore rule.
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # Not ignored directly; check whether a parent directory is.
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break

        if not ignored:
            ui.write(_("%s is not ignored\n") % m.uipath(f))
            continue

        if ignored == nf:
            ui.write(_("%s is ignored\n") % m.uipath(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing folder %s\n")
                     % (m.uipath(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1044
1044
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # With generaldelta the base column shows the delta parent rather than
    # the start of the delta chain.
    generaldelta = rl.version & revlog.FLAG_GENERALDELTA
    basehdr = ' delta' if generaldelta else ' base'

    # Full 40-char hashes in --debug mode, short hashes otherwise.
    shortfn = hex if ui.debugflag else short

    # Node column width; the revlog may be empty, so keep a sane default and
    # only peek at the first entry.
    idlen = 12
    for rev in rl:
        idlen = len(shortfn(rl.node(rev)))
        break

    if format == 0:
        ui.write((" rev offset length " + basehdr + " linkrev"
                 " %s %s p2\n") % ("nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        ui.write((" rev flag offset length"
                 " size " + basehdr + " link p1 p2"
                 " %s\n") % "nodeid".rjust(idlen))

    for rev in rl:
        node = rl.node(rev)
        base = rl.deltaparent(rev) if generaldelta else rl.chainbase(rev)
        if format == 0:
            try:
                parents = rl.parents(node)
            except Exception:
                parents = [nullid, nullid]
            ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
                rev, rl.start(rev), rl.length(rev), base, rl.linkrev(rev),
                shortfn(node), shortfn(parents[0]), shortfn(parents[1])))
        elif format == 1:
            prevs = rl.parentrevs(rev)
            ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % (
                rev, rl.flags(rev), rl.start(rev), rl.length(rev),
                rl.rawsize(rev), base, rl.linkrev(rev), prevs[0], prevs[1],
                shortfn(node)))
1101
1101
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in rl:
        parents = rl.parents(rl.node(rev))
        # One edge per parent; the second parent is only emitted when it is
        # not the null node (i.e. the revision is a merge).
        ui.write("\t%d -> %d\n" % (rl.rev(parents[0]), rev))
        if parents[1] != nullid:
            ui.write("\t%d -> %d\n" % (rl.rev(parents[1]), rev))
    ui.write("}\n")
1116
1116
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    # Helper to materialize content in a temp file (kept for extensions that
    # wrap this command); returns the file name.
    def writetemp(contents):
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    # Count of hard problems found; warnings below deliberately do not
    # increment this.
    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding: verify the configured charset is known to Python
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = util.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python: report interpreter path, version, and stdlib location
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    # TLS feature set of the Python build (protocols plus SNI support)
    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_(' TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_(' SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version: split "x.y.z+extra" into release and custom-build parts
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules: which module policy is active and where hg lives
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        # C extensions are expected to be importable under these policies;
        # a failure here is a real installation problem.
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = util.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    # compression engines: registered, actually available, and those usable
    # on the wire protocol server side
    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    # optional re2 regexp engine (faster matching when present)
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates: locate template dirs and smoke-test the default map file;
    # p doubles as the "templates are OK" flag and is cleared on any failure
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = util.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor: resolve the configured commit editor and check it is on PATH;
    # the default 'vi' gets a distinct, friendlier message
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = util.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = util.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username: ui.username() aborts when none is configured
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = util.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    # summary; problem count is also the command's exit status
    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1290
1290
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = peer.known([bin(s) for s in ids])
    # One character per queried node: "1" when known, "0" otherwise.
    ui.write("%s\n" % ("".join("1" if f else "0" for f in flags)))
1304
1304
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias: forwards everything to debugnamecomplete unchanged.
    debugnamecomplete(ui, repo, *args)
1309
1309
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Force-free mode: unconditionally delete the lock files (DANGEROUS),
    # then exit without reporting anything.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    # Set mode: acquire the requested lock(s) non-blocking, hold them until
    # the user answers the prompt (or the process is interrupted), then the
    # finally clause releases whatever was acquired.
    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    # Report mode (default): probe each lock and describe its holder.
    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We could take the lock, so it was free; release immediately.
            l.release()
        else:
            # Lock is held by someone else: read its file for holder info.
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    # Only mention the host when the lock was taken on a
                    # different machine.
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock vanished between the probe and the
                # stat -- treat it as free; anything else is a real error.
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    # Non-zero exit status when any lock is held.
    return held
1406
1406
1407 @command('debugmergestate', [], '')
1407 @command('debugmergestate', [], '')
1408 def debugmergestate(ui, repo, *args):
1408 def debugmergestate(ui, repo, *args):
1409 """print merge state
1409 """print merge state
1410
1410
1411 Use --verbose to print out information about whether v1 or v2 merge state
1411 Use --verbose to print out information about whether v1 or v2 merge state
1412 was chosen."""
1412 was chosen."""
1413 def _hashornull(h):
1413 def _hashornull(h):
1414 if h == nullhex:
1414 if h == nullhex:
1415 return 'null'
1415 return 'null'
1416 else:
1416 else:
1417 return h
1417 return h
1418
1418
1419 def printrecords(version):
1419 def printrecords(version):
1420 ui.write(('* version %d records\n') % version)
1420 ui.write(('* version %d records\n') % version)
1421 if version == 1:
1421 if version == 1:
1422 records = v1records
1422 records = v1records
1423 else:
1423 else:
1424 records = v2records
1424 records = v2records
1425
1425
1426 for rtype, record in records:
1426 for rtype, record in records:
1427 # pretty print some record types
1427 # pretty print some record types
1428 if rtype == 'L':
1428 if rtype == 'L':
1429 ui.write(('local: %s\n') % record)
1429 ui.write(('local: %s\n') % record)
1430 elif rtype == 'O':
1430 elif rtype == 'O':
1431 ui.write(('other: %s\n') % record)
1431 ui.write(('other: %s\n') % record)
1432 elif rtype == 'm':
1432 elif rtype == 'm':
1433 driver, mdstate = record.split('\0', 1)
1433 driver, mdstate = record.split('\0', 1)
1434 ui.write(('merge driver: %s (state "%s")\n')
1434 ui.write(('merge driver: %s (state "%s")\n')
1435 % (driver, mdstate))
1435 % (driver, mdstate))
1436 elif rtype in 'FDC':
1436 elif rtype in 'FDC':
1437 r = record.split('\0')
1437 r = record.split('\0')
1438 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1438 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1439 if version == 1:
1439 if version == 1:
1440 onode = 'not stored in v1 format'
1440 onode = 'not stored in v1 format'
1441 flags = r[7]
1441 flags = r[7]
1442 else:
1442 else:
1443 onode, flags = r[7:9]
1443 onode, flags = r[7:9]
1444 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1444 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1445 % (f, rtype, state, _hashornull(hash)))
1445 % (f, rtype, state, _hashornull(hash)))
1446 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1446 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1447 ui.write((' ancestor path: %s (node %s)\n')
1447 ui.write((' ancestor path: %s (node %s)\n')
1448 % (afile, _hashornull(anode)))
1448 % (afile, _hashornull(anode)))
1449 ui.write((' other path: %s (node %s)\n')
1449 ui.write((' other path: %s (node %s)\n')
1450 % (ofile, _hashornull(onode)))
1450 % (ofile, _hashornull(onode)))
1451 elif rtype == 'f':
1451 elif rtype == 'f':
1452 filename, rawextras = record.split('\0', 1)
1452 filename, rawextras = record.split('\0', 1)
1453 extras = rawextras.split('\0')
1453 extras = rawextras.split('\0')
1454 i = 0
1454 i = 0
1455 extrastrings = []
1455 extrastrings = []
1456 while i < len(extras):
1456 while i < len(extras):
1457 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1457 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1458 i += 2
1458 i += 2
1459
1459
1460 ui.write(('file extras: %s (%s)\n')
1460 ui.write(('file extras: %s (%s)\n')
1461 % (filename, ', '.join(extrastrings)))
1461 % (filename, ', '.join(extrastrings)))
1462 elif rtype == 'l':
1462 elif rtype == 'l':
1463 labels = record.split('\0', 2)
1463 labels = record.split('\0', 2)
1464 labels = [l for l in labels if len(l) > 0]
1464 labels = [l for l in labels if len(l) > 0]
1465 ui.write(('labels:\n'))
1465 ui.write(('labels:\n'))
1466 ui.write((' local: %s\n' % labels[0]))
1466 ui.write((' local: %s\n' % labels[0]))
1467 ui.write((' other: %s\n' % labels[1]))
1467 ui.write((' other: %s\n' % labels[1]))
1468 if len(labels) > 2:
1468 if len(labels) > 2:
1469 ui.write((' base: %s\n' % labels[2]))
1469 ui.write((' base: %s\n' % labels[2]))
1470 else:
1470 else:
1471 ui.write(('unrecognized entry: %s\t%s\n')
1471 ui.write(('unrecognized entry: %s\t%s\n')
1472 % (rtype, record.replace('\0', '\t')))
1472 % (rtype, record.replace('\0', '\t')))
1473
1473
1474 # Avoid mergestate.read() since it may raise an exception for unsupported
1474 # Avoid mergestate.read() since it may raise an exception for unsupported
1475 # merge state records. We shouldn't be doing this, but this is OK since this
1475 # merge state records. We shouldn't be doing this, but this is OK since this
1476 # command is pretty low-level.
1476 # command is pretty low-level.
1477 ms = mergemod.mergestate(repo)
1477 ms = mergemod.mergestate(repo)
1478
1478
1479 # sort so that reasonable information is on top
1479 # sort so that reasonable information is on top
1480 v1records = ms._readrecordsv1()
1480 v1records = ms._readrecordsv1()
1481 v2records = ms._readrecordsv2()
1481 v2records = ms._readrecordsv2()
1482 order = 'LOml'
1482 order = 'LOml'
1483 def key(r):
1483 def key(r):
1484 idx = order.find(r[0])
1484 idx = order.find(r[0])
1485 if idx == -1:
1485 if idx == -1:
1486 return (1, r[1])
1486 return (1, r[1])
1487 else:
1487 else:
1488 return (0, idx)
1488 return (0, idx)
1489 v1records.sort(key=key)
1489 v1records.sort(key=key)
1490 v2records.sort(key=key)
1490 v2records.sort(key=key)
1491
1491
1492 if not v1records and not v2records:
1492 if not v1records and not v2records:
1493 ui.write(('no merge state found\n'))
1493 ui.write(('no merge state found\n'))
1494 elif not v2records:
1494 elif not v2records:
1495 ui.note(('no version 2 merge state\n'))
1495 ui.note(('no version 2 merge state\n'))
1496 printrecords(1)
1496 printrecords(1)
1497 elif ms._v1v2match(v1records, v2records):
1497 elif ms._v1v2match(v1records, v2records):
1498 ui.note(('v1 and v2 states match: using v2\n'))
1498 ui.note(('v1 and v2 states match: using v2\n'))
1499 printrecords(2)
1499 printrecords(2)
1500 else:
1500 else:
1501 ui.note(('v1 and v2 states mismatch: using v1\n'))
1501 ui.note(('v1 and v2 states mismatch: using v1\n'))
1502 printrecords(1)
1502 printrecords(1)
1503 if ui.verbose:
1503 if ui.verbose:
1504 printrecords(2)
1504 printrecords(2)
1505
1505
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    # Collect candidates from every name namespace except 'branches';
    # branches are handled separately below so only open ones are offered.
    candidates = set()
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # With no arguments, an empty prefix matches every known name.
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(n for n in candidates if n.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1525
1525
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # We do not use revsingle/revrange functions here to accept
        # arbitrary node identifiers, possibly not present in the
        # local repository.
        try:
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # Deletion mode: remove the markers at the given obsstore indices.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # Creation mode: record a marker from precursor to successors.
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        lck = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            lck.release()
    else:
        # Display mode: list markers, optionally limited to --rev.
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers

        def isrelevant(m):
            return True

        if opts.get('rev') and opts.get('index'):
            # Indices are positions in the full marker list, so iterate
            # everything but only display the markers selected by --rev.
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)

            def isrelevant(m):
                return m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1642
1642
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Map the shell-supplied path onto a repo-relative spec; anything
        # outside the repository yields no completions at all.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # Dirstate keys always use '/', so on platforms with a different
        # separator translate the spec first and translate hits back.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if not (f.startswith(spec) and st[0] in acceptable):
                continue
            if fixpaths:
                f = f.replace('/', pycompat.ossep)
            if fullpaths:
                addfile(f)
                continue
            s = f.find(pycompat.ossep, speclen)
            if s >= 0:
                adddir(f[:s])
            else:
                addfile(f)
        return files, dirs

    # Translate -n/-a/-r flags into the dirstate states we accept; with no
    # flags, every state ('nmar') is acceptable.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1707
1707
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    def yesno(value):
        # Localized yes/no rendering for the summary lines below.
        return _('yes') if value else _('no')

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % yesno(peer.local() is not None))
        ui.write(_('pushable: %s\n') % yesno(peer.canpush()))
1726
1726
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool wins over everything else; force it via ui.forcemerge.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # Report the other tool-selection inputs (verbose only).
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        matcher = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(matcher):
            fctx = ctx[path]
            try:
                # Without --debug, suppress the chatter _picktool emits
                # while probing merge-patterns and merge-tools sections.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1805
1805
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    peer = hg.peer(ui, {}, repopath)
    if not keyinfo:
        # Listing mode: dump every key/value pair in the namespace.
        for key, value in sorted(peer.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (util.escapestr(key),
                                   util.escapestr(value)))
        return
    # Update mode: compare-and-set, reporting the peer's verdict.
    key, old, new = keyinfo
    ok = peer.pushkey(namespace, key, old, new)
    ui.status(pycompat.bytestr(ok) + '\n')
    return not ok
1826
1826
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    # Resolve the two revisions and build their pvecs for comparison.
    ctxa = scmutil.revsingle(repo, a)
    ctxb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ctxa)
    pb = pvec.ctxpvec(ctxb)
    # Classify the relationship: equal, ancestor/descendant, or divergent.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1847
1847
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            inmanifest = set(ctx.manifest().keys())
            indirstate = set(dirstate)
            # Files only the manifest knows about...
            manifestonly = inmanifest - indirstate
            # ...plus dirstate-only files that are not marked as added.
            dsonly = indirstate - inmanifest
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1885
1885
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper: the actual rebuild logic lives in mercurial.repair.
    repair.rebuildfncache(ui, repo)
1890
1890
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    matcher = scmutil.match(ctx, (file1,) + pats, opts)
    for abspath in ctx.walk(matcher):
        fctx = ctx[abspath]
        # renamed() yields (source path, source filenode) or None/False.
        renamed = fctx.filelog().renamed(fctx.filenode())
        relpath = matcher.rel(abspath)
        if renamed:
            ui.write(_("%s renamed from %s:%s\n")
                     % (relpath, renamed[0], hex(renamed[1])))
        else:
            ui.write(_("%s not renamed\n") % relpath)
1908
1908
1909 @command('debugrevlog', cmdutil.debugrevlogopts +
1909 @command('debugrevlog', cmdutil.debugrevlogopts +
1910 [('d', 'dump', False, _('dump index data'))],
1910 [('d', 'dump', False, _('dump index data'))],
1911 _('-c|-m|FILE'),
1911 _('-c|-m|FILE'),
1912 optionalrepo=True)
1912 optionalrepo=True)
1913 def debugrevlog(ui, repo, file_=None, **opts):
1913 def debugrevlog(ui, repo, file_=None, **opts):
1914 """show data and statistics about a revlog"""
1914 """show data and statistics about a revlog"""
1915 opts = pycompat.byteskwargs(opts)
1915 opts = pycompat.byteskwargs(opts)
1916 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1916 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
1917
1917
1918 if opts.get("dump"):
1918 if opts.get("dump"):
1919 numrevs = len(r)
1919 numrevs = len(r)
1920 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1920 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
1921 " rawsize totalsize compression heads chainlen\n"))
1921 " rawsize totalsize compression heads chainlen\n"))
1922 ts = 0
1922 ts = 0
1923 heads = set()
1923 heads = set()
1924
1924
1925 for rev in xrange(numrevs):
1925 for rev in xrange(numrevs):
1926 dbase = r.deltaparent(rev)
1926 dbase = r.deltaparent(rev)
1927 if dbase == -1:
1927 if dbase == -1:
1928 dbase = rev
1928 dbase = rev
1929 cbase = r.chainbase(rev)
1929 cbase = r.chainbase(rev)
1930 clen = r.chainlen(rev)
1930 clen = r.chainlen(rev)
1931 p1, p2 = r.parentrevs(rev)
1931 p1, p2 = r.parentrevs(rev)
1932 rs = r.rawsize(rev)
1932 rs = r.rawsize(rev)
1933 ts = ts + rs
1933 ts = ts + rs
1934 heads -= set(r.parentrevs(rev))
1934 heads -= set(r.parentrevs(rev))
1935 heads.add(rev)
1935 heads.add(rev)
1936 try:
1936 try:
1937 compression = ts / r.end(rev)
1937 compression = ts / r.end(rev)
1938 except ZeroDivisionError:
1938 except ZeroDivisionError:
1939 compression = 0
1939 compression = 0
1940 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1940 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
1941 "%11d %5d %8d\n" %
1941 "%11d %5d %8d\n" %
1942 (rev, p1, p2, r.start(rev), r.end(rev),
1942 (rev, p1, p2, r.start(rev), r.end(rev),
1943 r.start(dbase), r.start(cbase),
1943 r.start(dbase), r.start(cbase),
1944 r.start(p1), r.start(p2),
1944 r.start(p1), r.start(p2),
1945 rs, ts, compression, len(heads), clen))
1945 rs, ts, compression, len(heads), clen))
1946 return 0
1946 return 0
1947
1947
1948 v = r.version
1948 v = r.version
1949 format = v & 0xFFFF
1949 format = v & 0xFFFF
1950 flags = []
1950 flags = []
1951 gdelta = False
1951 gdelta = False
1952 if v & revlog.FLAG_INLINE_DATA:
1952 if v & revlog.FLAG_INLINE_DATA:
1953 flags.append('inline')
1953 flags.append('inline')
1954 if v & revlog.FLAG_GENERALDELTA:
1954 if v & revlog.FLAG_GENERALDELTA:
1955 gdelta = True
1955 gdelta = True
1956 flags.append('generaldelta')
1956 flags.append('generaldelta')
1957 if not flags:
1957 if not flags:
1958 flags = ['(none)']
1958 flags = ['(none)']
1959
1959
1960 nummerges = 0
1960 nummerges = 0
1961 numfull = 0
1961 numfull = 0
1962 numprev = 0
1962 numprev = 0
1963 nump1 = 0
1963 nump1 = 0
1964 nump2 = 0
1964 nump2 = 0
1965 numother = 0
1965 numother = 0
1966 nump1prev = 0
1966 nump1prev = 0
1967 nump2prev = 0
1967 nump2prev = 0
1968 chainlengths = []
1968 chainlengths = []
1969 chainbases = []
1969 chainbases = []
1970 chainspans = []
1970 chainspans = []
1971
1971
1972 datasize = [None, 0, 0]
1972 datasize = [None, 0, 0]
1973 fullsize = [None, 0, 0]
1973 fullsize = [None, 0, 0]
1974 deltasize = [None, 0, 0]
1974 deltasize = [None, 0, 0]
1975 chunktypecounts = {}
1975 chunktypecounts = {}
1976 chunktypesizes = {}
1976 chunktypesizes = {}
1977
1977
1978 def addsize(size, l):
1978 def addsize(size, l):
1979 if l[0] is None or size < l[0]:
1979 if l[0] is None or size < l[0]:
1980 l[0] = size
1980 l[0] = size
1981 if size > l[1]:
1981 if size > l[1]:
1982 l[1] = size
1982 l[1] = size
1983 l[2] += size
1983 l[2] += size
1984
1984
1985 numrevs = len(r)
1985 numrevs = len(r)
1986 for rev in xrange(numrevs):
1986 for rev in xrange(numrevs):
1987 p1, p2 = r.parentrevs(rev)
1987 p1, p2 = r.parentrevs(rev)
1988 delta = r.deltaparent(rev)
1988 delta = r.deltaparent(rev)
1989 if format > 0:
1989 if format > 0:
1990 addsize(r.rawsize(rev), datasize)
1990 addsize(r.rawsize(rev), datasize)
1991 if p2 != nullrev:
1991 if p2 != nullrev:
1992 nummerges += 1
1992 nummerges += 1
1993 size = r.length(rev)
1993 size = r.length(rev)
1994 if delta == nullrev:
1994 if delta == nullrev:
1995 chainlengths.append(0)
1995 chainlengths.append(0)
1996 chainbases.append(r.start(rev))
1996 chainbases.append(r.start(rev))
1997 chainspans.append(size)
1997 chainspans.append(size)
1998 numfull += 1
1998 numfull += 1
1999 addsize(size, fullsize)
1999 addsize(size, fullsize)
2000 else:
2000 else:
2001 chainlengths.append(chainlengths[delta] + 1)
2001 chainlengths.append(chainlengths[delta] + 1)
2002 baseaddr = chainbases[delta]
2002 baseaddr = chainbases[delta]
2003 revaddr = r.start(rev)
2003 revaddr = r.start(rev)
2004 chainbases.append(baseaddr)
2004 chainbases.append(baseaddr)
2005 chainspans.append((revaddr - baseaddr) + size)
2005 chainspans.append((revaddr - baseaddr) + size)
2006 addsize(size, deltasize)
2006 addsize(size, deltasize)
2007 if delta == rev - 1:
2007 if delta == rev - 1:
2008 numprev += 1
2008 numprev += 1
2009 if delta == p1:
2009 if delta == p1:
2010 nump1prev += 1
2010 nump1prev += 1
2011 elif delta == p2:
2011 elif delta == p2:
2012 nump2prev += 1
2012 nump2prev += 1
2013 elif delta == p1:
2013 elif delta == p1:
2014 nump1 += 1
2014 nump1 += 1
2015 elif delta == p2:
2015 elif delta == p2:
2016 nump2 += 1
2016 nump2 += 1
2017 elif delta != nullrev:
2017 elif delta != nullrev:
2018 numother += 1
2018 numother += 1
2019
2019
2020 # Obtain data on the raw chunks in the revlog.
2020 # Obtain data on the raw chunks in the revlog.
2021 segment = r._getsegmentforrevs(rev, rev)[1]
2021 segment = r._getsegmentforrevs(rev, rev)[1]
2022 if segment:
2022 if segment:
2023 chunktype = bytes(segment[0:1])
2023 chunktype = bytes(segment[0:1])
2024 else:
2024 else:
2025 chunktype = 'empty'
2025 chunktype = 'empty'
2026
2026
2027 if chunktype not in chunktypecounts:
2027 if chunktype not in chunktypecounts:
2028 chunktypecounts[chunktype] = 0
2028 chunktypecounts[chunktype] = 0
2029 chunktypesizes[chunktype] = 0
2029 chunktypesizes[chunktype] = 0
2030
2030
2031 chunktypecounts[chunktype] += 1
2031 chunktypecounts[chunktype] += 1
2032 chunktypesizes[chunktype] += size
2032 chunktypesizes[chunktype] += size
2033
2033
2034 # Adjust size min value for empty cases
2034 # Adjust size min value for empty cases
2035 for size in (datasize, fullsize, deltasize):
2035 for size in (datasize, fullsize, deltasize):
2036 if size[0] is None:
2036 if size[0] is None:
2037 size[0] = 0
2037 size[0] = 0
2038
2038
2039 numdeltas = numrevs - numfull
2039 numdeltas = numrevs - numfull
2040 numoprev = numprev - nump1prev - nump2prev
2040 numoprev = numprev - nump1prev - nump2prev
2041 totalrawsize = datasize[2]
2041 totalrawsize = datasize[2]
2042 datasize[2] /= numrevs
2042 datasize[2] /= numrevs
2043 fulltotal = fullsize[2]
2043 fulltotal = fullsize[2]
2044 fullsize[2] /= numfull
2044 fullsize[2] /= numfull
2045 deltatotal = deltasize[2]
2045 deltatotal = deltasize[2]
2046 if numrevs - numfull > 0:
2046 if numrevs - numfull > 0:
2047 deltasize[2] /= numrevs - numfull
2047 deltasize[2] /= numrevs - numfull
2048 totalsize = fulltotal + deltatotal
2048 totalsize = fulltotal + deltatotal
2049 avgchainlen = sum(chainlengths) / numrevs
2049 avgchainlen = sum(chainlengths) / numrevs
2050 maxchainlen = max(chainlengths)
2050 maxchainlen = max(chainlengths)
2051 maxchainspan = max(chainspans)
2051 maxchainspan = max(chainspans)
2052 compratio = 1
2052 compratio = 1
2053 if totalsize:
2053 if totalsize:
2054 compratio = totalrawsize / totalsize
2054 compratio = totalrawsize / totalsize
2055
2055
2056 basedfmtstr = '%%%dd\n'
2056 basedfmtstr = '%%%dd\n'
2057 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2057 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2058
2058
2059 def dfmtstr(max):
2059 def dfmtstr(max):
2060 return basedfmtstr % len(str(max))
2060 return basedfmtstr % len(str(max))
2061 def pcfmtstr(max, padding=0):
2061 def pcfmtstr(max, padding=0):
2062 return basepcfmtstr % (len(str(max)), ' ' * padding)
2062 return basepcfmtstr % (len(str(max)), ' ' * padding)
2063
2063
2064 def pcfmt(value, total):
2064 def pcfmt(value, total):
2065 if total:
2065 if total:
2066 return (value, 100 * float(value) / total)
2066 return (value, 100 * float(value) / total)
2067 else:
2067 else:
2068 return value, 100.0
2068 return value, 100.0
2069
2069
2070 ui.write(('format : %d\n') % format)
2070 ui.write(('format : %d\n') % format)
2071 ui.write(('flags : %s\n') % ', '.join(flags))
2071 ui.write(('flags : %s\n') % ', '.join(flags))
2072
2072
2073 ui.write('\n')
2073 ui.write('\n')
2074 fmt = pcfmtstr(totalsize)
2074 fmt = pcfmtstr(totalsize)
2075 fmt2 = dfmtstr(totalsize)
2075 fmt2 = dfmtstr(totalsize)
2076 ui.write(('revisions : ') + fmt2 % numrevs)
2076 ui.write(('revisions : ') + fmt2 % numrevs)
2077 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2077 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2078 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2078 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2079 ui.write(('revisions : ') + fmt2 % numrevs)
2079 ui.write(('revisions : ') + fmt2 % numrevs)
2080 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2080 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2081 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2081 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2082 ui.write(('revision size : ') + fmt2 % totalsize)
2082 ui.write(('revision size : ') + fmt2 % totalsize)
2083 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2083 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2084 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2084 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2085
2085
2086 def fmtchunktype(chunktype):
2086 def fmtchunktype(chunktype):
2087 if chunktype == 'empty':
2087 if chunktype == 'empty':
2088 return ' %s : ' % chunktype
2088 return ' %s : ' % chunktype
2089 elif chunktype in pycompat.bytestr(string.ascii_letters):
2089 elif chunktype in pycompat.bytestr(string.ascii_letters):
2090 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2090 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2091 else:
2091 else:
2092 return ' 0x%s : ' % hex(chunktype)
2092 return ' 0x%s : ' % hex(chunktype)
2093
2093
2094 ui.write('\n')
2094 ui.write('\n')
2095 ui.write(('chunks : ') + fmt2 % numrevs)
2095 ui.write(('chunks : ') + fmt2 % numrevs)
2096 for chunktype in sorted(chunktypecounts):
2096 for chunktype in sorted(chunktypecounts):
2097 ui.write(fmtchunktype(chunktype))
2097 ui.write(fmtchunktype(chunktype))
2098 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2098 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2099 ui.write(('chunks size : ') + fmt2 % totalsize)
2099 ui.write(('chunks size : ') + fmt2 % totalsize)
2100 for chunktype in sorted(chunktypecounts):
2100 for chunktype in sorted(chunktypecounts):
2101 ui.write(fmtchunktype(chunktype))
2101 ui.write(fmtchunktype(chunktype))
2102 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2102 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2103
2103
2104 ui.write('\n')
2104 ui.write('\n')
2105 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2105 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2106 ui.write(('avg chain length : ') + fmt % avgchainlen)
2106 ui.write(('avg chain length : ') + fmt % avgchainlen)
2107 ui.write(('max chain length : ') + fmt % maxchainlen)
2107 ui.write(('max chain length : ') + fmt % maxchainlen)
2108 ui.write(('max chain reach : ') + fmt % maxchainspan)
2108 ui.write(('max chain reach : ') + fmt % maxchainspan)
2109 ui.write(('compression ratio : ') + fmt % compratio)
2109 ui.write(('compression ratio : ') + fmt % compratio)
2110
2110
2111 if format > 0:
2111 if format > 0:
2112 ui.write('\n')
2112 ui.write('\n')
2113 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2113 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2114 % tuple(datasize))
2114 % tuple(datasize))
2115 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2115 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2116 % tuple(fullsize))
2116 % tuple(fullsize))
2117 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2117 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2118 % tuple(deltasize))
2118 % tuple(deltasize))
2119
2119
2120 if numdeltas > 0:
2120 if numdeltas > 0:
2121 ui.write('\n')
2121 ui.write('\n')
2122 fmt = pcfmtstr(numdeltas)
2122 fmt = pcfmtstr(numdeltas)
2123 fmt2 = pcfmtstr(numdeltas, 4)
2123 fmt2 = pcfmtstr(numdeltas, 4)
2124 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2124 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2125 if numprev > 0:
2125 if numprev > 0:
2126 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2126 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2127 numprev))
2127 numprev))
2128 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2128 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2129 numprev))
2129 numprev))
2130 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2130 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2131 numprev))
2131 numprev))
2132 if gdelta:
2132 if gdelta:
2133 ui.write(('deltas against p1 : ')
2133 ui.write(('deltas against p1 : ')
2134 + fmt % pcfmt(nump1, numdeltas))
2134 + fmt % pcfmt(nump1, numdeltas))
2135 ui.write(('deltas against p2 : ')
2135 ui.write(('deltas against p2 : ')
2136 + fmt % pcfmt(nump2, numdeltas))
2136 + fmt % pcfmt(nump2, numdeltas))
2137 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2137 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2138 numdeltas))
2138 numdeltas))
2139
2139
2140 @command('debugrevspec',
2140 @command('debugrevspec',
2141 [('', 'optimize', None,
2141 [('', 'optimize', None,
2142 _('print parsed tree after optimizing (DEPRECATED)')),
2142 _('print parsed tree after optimizing (DEPRECATED)')),
2143 ('', 'show-revs', True, _('print list of result revisions (default)')),
2143 ('', 'show-revs', True, _('print list of result revisions (default)')),
2144 ('s', 'show-set', None, _('print internal representation of result set')),
2144 ('s', 'show-set', None, _('print internal representation of result set')),
2145 ('p', 'show-stage', [],
2145 ('p', 'show-stage', [],
2146 _('print parsed tree at the given stage'), _('NAME')),
2146 _('print parsed tree at the given stage'), _('NAME')),
2147 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2147 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2148 ('', 'verify-optimized', False, _('verify optimized result')),
2148 ('', 'verify-optimized', False, _('verify optimized result')),
2149 ],
2149 ],
2150 ('REVSPEC'))
2150 ('REVSPEC'))
2151 def debugrevspec(ui, repo, expr, **opts):
2151 def debugrevspec(ui, repo, expr, **opts):
2152 """parse and apply a revision specification
2152 """parse and apply a revision specification
2153
2153
2154 Use -p/--show-stage option to print the parsed tree at the given stages.
2154 Use -p/--show-stage option to print the parsed tree at the given stages.
2155 Use -p all to print tree at every stage.
2155 Use -p all to print tree at every stage.
2156
2156
2157 Use --no-show-revs option with -s or -p to print only the set
2157 Use --no-show-revs option with -s or -p to print only the set
2158 representation or the parsed tree respectively.
2158 representation or the parsed tree respectively.
2159
2159
2160 Use --verify-optimized to compare the optimized result with the unoptimized
2160 Use --verify-optimized to compare the optimized result with the unoptimized
2161 one. Returns 1 if the optimized result differs.
2161 one. Returns 1 if the optimized result differs.
2162 """
2162 """
2163 opts = pycompat.byteskwargs(opts)
2163 opts = pycompat.byteskwargs(opts)
2164 aliases = ui.configitems('revsetalias')
2164 aliases = ui.configitems('revsetalias')
2165 stages = [
2165 stages = [
2166 ('parsed', lambda tree: tree),
2166 ('parsed', lambda tree: tree),
2167 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2167 ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
2168 ui.warn)),
2168 ui.warn)),
2169 ('concatenated', revsetlang.foldconcat),
2169 ('concatenated', revsetlang.foldconcat),
2170 ('analyzed', revsetlang.analyze),
2170 ('analyzed', revsetlang.analyze),
2171 ('optimized', revsetlang.optimize),
2171 ('optimized', revsetlang.optimize),
2172 ]
2172 ]
2173 if opts['no_optimized']:
2173 if opts['no_optimized']:
2174 stages = stages[:-1]
2174 stages = stages[:-1]
2175 if opts['verify_optimized'] and opts['no_optimized']:
2175 if opts['verify_optimized'] and opts['no_optimized']:
2176 raise error.Abort(_('cannot use --verify-optimized with '
2176 raise error.Abort(_('cannot use --verify-optimized with '
2177 '--no-optimized'))
2177 '--no-optimized'))
2178 stagenames = set(n for n, f in stages)
2178 stagenames = set(n for n, f in stages)
2179
2179
2180 showalways = set()
2180 showalways = set()
2181 showchanged = set()
2181 showchanged = set()
2182 if ui.verbose and not opts['show_stage']:
2182 if ui.verbose and not opts['show_stage']:
2183 # show parsed tree by --verbose (deprecated)
2183 # show parsed tree by --verbose (deprecated)
2184 showalways.add('parsed')
2184 showalways.add('parsed')
2185 showchanged.update(['expanded', 'concatenated'])
2185 showchanged.update(['expanded', 'concatenated'])
2186 if opts['optimize']:
2186 if opts['optimize']:
2187 showalways.add('optimized')
2187 showalways.add('optimized')
2188 if opts['show_stage'] and opts['optimize']:
2188 if opts['show_stage'] and opts['optimize']:
2189 raise error.Abort(_('cannot use --optimize with --show-stage'))
2189 raise error.Abort(_('cannot use --optimize with --show-stage'))
2190 if opts['show_stage'] == ['all']:
2190 if opts['show_stage'] == ['all']:
2191 showalways.update(stagenames)
2191 showalways.update(stagenames)
2192 else:
2192 else:
2193 for n in opts['show_stage']:
2193 for n in opts['show_stage']:
2194 if n not in stagenames:
2194 if n not in stagenames:
2195 raise error.Abort(_('invalid stage name: %s') % n)
2195 raise error.Abort(_('invalid stage name: %s') % n)
2196 showalways.update(opts['show_stage'])
2196 showalways.update(opts['show_stage'])
2197
2197
2198 treebystage = {}
2198 treebystage = {}
2199 printedtree = None
2199 printedtree = None
2200 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2200 tree = revsetlang.parse(expr, lookup=repo.__contains__)
2201 for n, f in stages:
2201 for n, f in stages:
2202 treebystage[n] = tree = f(tree)
2202 treebystage[n] = tree = f(tree)
2203 if n in showalways or (n in showchanged and tree != printedtree):
2203 if n in showalways or (n in showchanged and tree != printedtree):
2204 if opts['show_stage'] or n != 'parsed':
2204 if opts['show_stage'] or n != 'parsed':
2205 ui.write(("* %s:\n") % n)
2205 ui.write(("* %s:\n") % n)
2206 ui.write(revsetlang.prettyformat(tree), "\n")
2206 ui.write(revsetlang.prettyformat(tree), "\n")
2207 printedtree = tree
2207 printedtree = tree
2208
2208
2209 if opts['verify_optimized']:
2209 if opts['verify_optimized']:
2210 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2210 arevs = revset.makematcher(treebystage['analyzed'])(repo)
2211 brevs = revset.makematcher(treebystage['optimized'])(repo)
2211 brevs = revset.makematcher(treebystage['optimized'])(repo)
2212 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2212 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2213 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2213 ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
2214 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2214 ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
2215 arevs = list(arevs)
2215 arevs = list(arevs)
2216 brevs = list(brevs)
2216 brevs = list(brevs)
2217 if arevs == brevs:
2217 if arevs == brevs:
2218 return 0
2218 return 0
2219 ui.write(('--- analyzed\n'), label='diff.file_a')
2219 ui.write(('--- analyzed\n'), label='diff.file_a')
2220 ui.write(('+++ optimized\n'), label='diff.file_b')
2220 ui.write(('+++ optimized\n'), label='diff.file_b')
2221 sm = difflib.SequenceMatcher(None, arevs, brevs)
2221 sm = difflib.SequenceMatcher(None, arevs, brevs)
2222 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2222 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
2223 if tag in ('delete', 'replace'):
2223 if tag in ('delete', 'replace'):
2224 for c in arevs[alo:ahi]:
2224 for c in arevs[alo:ahi]:
2225 ui.write('-%s\n' % c, label='diff.deleted')
2225 ui.write('-%s\n' % c, label='diff.deleted')
2226 if tag in ('insert', 'replace'):
2226 if tag in ('insert', 'replace'):
2227 for c in brevs[blo:bhi]:
2227 for c in brevs[blo:bhi]:
2228 ui.write('+%s\n' % c, label='diff.inserted')
2228 ui.write('+%s\n' % c, label='diff.inserted')
2229 if tag == 'equal':
2229 if tag == 'equal':
2230 for c in arevs[alo:ahi]:
2230 for c in arevs[alo:ahi]:
2231 ui.write(' %s\n' % c)
2231 ui.write(' %s\n' % c)
2232 return 1
2232 return 1
2233
2233
2234 func = revset.makematcher(tree)
2234 func = revset.makematcher(tree)
2235 revs = func(repo)
2235 revs = func(repo)
2236 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2236 if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
2237 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2237 ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
2238 if not opts['show_revs']:
2238 if not opts['show_revs']:
2239 return
2239 return
2240 for c in revs:
2240 for c in revs:
2241 ui.write("%d\n" % c)
2241 ui.write("%d\n" % c)
2242
2242
2243 @command('debugserve', [
2243 @command('debugserve', [
2244 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2244 ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
2245 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2245 ('', 'logiofd', '', _('file descriptor to log server I/O to')),
2246 ('', 'logiofile', '', _('file to log server I/O to')),
2246 ('', 'logiofile', '', _('file to log server I/O to')),
2247 ], '')
2247 ], '')
2248 def debugserve(ui, repo, **opts):
2248 def debugserve(ui, repo, **opts):
2249 """run a server with advanced settings
2249 """run a server with advanced settings
2250
2250
2251 This command is similar to :hg:`serve`. It exists partially as a
2251 This command is similar to :hg:`serve`. It exists partially as a
2252 workaround to the fact that ``hg serve --stdio`` must have specific
2252 workaround to the fact that ``hg serve --stdio`` must have specific
2253 arguments for security reasons.
2253 arguments for security reasons.
2254 """
2254 """
2255 opts = pycompat.byteskwargs(opts)
2255 opts = pycompat.byteskwargs(opts)
2256
2256
2257 if not opts['sshstdio']:
2257 if not opts['sshstdio']:
2258 raise error.Abort(_('only --sshstdio is currently supported'))
2258 raise error.Abort(_('only --sshstdio is currently supported'))
2259
2259
2260 logfh = None
2260 logfh = None
2261
2261
2262 if opts['logiofd'] and opts['logiofile']:
2262 if opts['logiofd'] and opts['logiofile']:
2263 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2263 raise error.Abort(_('cannot use both --logiofd and --logiofile'))
2264
2264
2265 if opts['logiofd']:
2265 if opts['logiofd']:
2266 # Line buffered because output is line based.
2266 # Line buffered because output is line based.
2267 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2267 logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
2268 elif opts['logiofile']:
2268 elif opts['logiofile']:
2269 logfh = open(opts['logiofile'], 'ab', 1)
2269 logfh = open(opts['logiofile'], 'ab', 1)
2270
2270
2271 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2271 s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
2272 s.serve_forever()
2272 s.serve_forever()
2273
2273
2274 @command('debugsetparents', [], _('REV1 [REV2]'))
2274 @command('debugsetparents', [], _('REV1 [REV2]'))
2275 def debugsetparents(ui, repo, rev1, rev2=None):
2275 def debugsetparents(ui, repo, rev1, rev2=None):
2276 """manually set the parents of the current working directory
2276 """manually set the parents of the current working directory
2277
2277
2278 This is useful for writing repository conversion tools, but should
2278 This is useful for writing repository conversion tools, but should
2279 be used with care. For example, neither the working directory nor the
2279 be used with care. For example, neither the working directory nor the
2280 dirstate is updated, so file status may be incorrect after running this
2280 dirstate is updated, so file status may be incorrect after running this
2281 command.
2281 command.
2282
2282
2283 Returns 0 on success.
2283 Returns 0 on success.
2284 """
2284 """
2285
2285
2286 r1 = scmutil.revsingle(repo, rev1).node()
2286 r1 = scmutil.revsingle(repo, rev1).node()
2287 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2287 r2 = scmutil.revsingle(repo, rev2, 'null').node()
2288
2288
2289 with repo.wlock():
2289 with repo.wlock():
2290 repo.setparents(r1, r2)
2290 repo.setparents(r1, r2)
2291
2291
2292 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2292 @command('debugssl', [], '[SOURCE]', optionalrepo=True)
2293 def debugssl(ui, repo, source=None, **opts):
2293 def debugssl(ui, repo, source=None, **opts):
2294 '''test a secure connection to a server
2294 '''test a secure connection to a server
2295
2295
2296 This builds the certificate chain for the server on Windows, installing the
2296 This builds the certificate chain for the server on Windows, installing the
2297 missing intermediates and trusted root via Windows Update if necessary. It
2297 missing intermediates and trusted root via Windows Update if necessary. It
2298 does nothing on other platforms.
2298 does nothing on other platforms.
2299
2299
2300 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2300 If SOURCE is omitted, the 'default' path will be used. If a URL is given,
2301 that server is used. See :hg:`help urls` for more information.
2301 that server is used. See :hg:`help urls` for more information.
2302
2302
2303 If the update succeeds, retry the original operation. Otherwise, the cause
2303 If the update succeeds, retry the original operation. Otherwise, the cause
2304 of the SSL error is likely another issue.
2304 of the SSL error is likely another issue.
2305 '''
2305 '''
2306 if not pycompat.iswindows:
2306 if not pycompat.iswindows:
2307 raise error.Abort(_('certificate chain building is only possible on '
2307 raise error.Abort(_('certificate chain building is only possible on '
2308 'Windows'))
2308 'Windows'))
2309
2309
2310 if not source:
2310 if not source:
2311 if not repo:
2311 if not repo:
2312 raise error.Abort(_("there is no Mercurial repository here, and no "
2312 raise error.Abort(_("there is no Mercurial repository here, and no "
2313 "server specified"))
2313 "server specified"))
2314 source = "default"
2314 source = "default"
2315
2315
2316 source, branches = hg.parseurl(ui.expandpath(source))
2316 source, branches = hg.parseurl(ui.expandpath(source))
2317 url = util.url(source)
2317 url = util.url(source)
2318 addr = None
2318 addr = None
2319
2319
2320 defaultport = {'https': 443, 'ssh': 22}
2320 defaultport = {'https': 443, 'ssh': 22}
2321 if url.scheme in defaultport:
2321 if url.scheme in defaultport:
2322 try:
2322 try:
2323 addr = (url.host, int(url.port or defaultport[url.scheme]))
2323 addr = (url.host, int(url.port or defaultport[url.scheme]))
2324 except ValueError:
2324 except ValueError:
2325 raise error.Abort(_("malformed port number in URL"))
2325 raise error.Abort(_("malformed port number in URL"))
2326 else:
2326 else:
2327 raise error.Abort(_("only https and ssh connections are supported"))
2327 raise error.Abort(_("only https and ssh connections are supported"))
2328
2328
2329 from . import win32
2329 from . import win32
2330
2330
2331 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2331 s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
2332 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2332 cert_reqs=ssl.CERT_NONE, ca_certs=None)
2333
2333
2334 try:
2334 try:
2335 s.connect(addr)
2335 s.connect(addr)
2336 cert = s.getpeercert(True)
2336 cert = s.getpeercert(True)
2337
2337
2338 ui.status(_('checking the certificate chain for %s\n') % url.host)
2338 ui.status(_('checking the certificate chain for %s\n') % url.host)
2339
2339
2340 complete = win32.checkcertificatechain(cert, build=False)
2340 complete = win32.checkcertificatechain(cert, build=False)
2341
2341
2342 if not complete:
2342 if not complete:
2343 ui.status(_('certificate chain is incomplete, updating... '))
2343 ui.status(_('certificate chain is incomplete, updating... '))
2344
2344
2345 if not win32.checkcertificatechain(cert):
2345 if not win32.checkcertificatechain(cert):
2346 ui.status(_('failed.\n'))
2346 ui.status(_('failed.\n'))
2347 else:
2347 else:
2348 ui.status(_('done.\n'))
2348 ui.status(_('done.\n'))
2349 else:
2349 else:
2350 ui.status(_('full certificate chain is available\n'))
2350 ui.status(_('full certificate chain is available\n'))
2351 finally:
2351 finally:
2352 s.close()
2352 s.close()
2353
2353
2354 @command('debugsub',
2354 @command('debugsub',
2355 [('r', 'rev', '',
2355 [('r', 'rev', '',
2356 _('revision to check'), _('REV'))],
2356 _('revision to check'), _('REV'))],
2357 _('[-r REV] [REV]'))
2357 _('[-r REV] [REV]'))
2358 def debugsub(ui, repo, rev=None):
2358 def debugsub(ui, repo, rev=None):
2359 ctx = scmutil.revsingle(repo, rev, None)
2359 ctx = scmutil.revsingle(repo, rev, None)
2360 for k, v in sorted(ctx.substate.items()):
2360 for k, v in sorted(ctx.substate.items()):
2361 ui.write(('path %s\n') % k)
2361 ui.write(('path %s\n') % k)
2362 ui.write((' source %s\n') % v[0])
2362 ui.write((' source %s\n') % v[0])
2363 ui.write((' revision %s\n') % v[1])
2363 ui.write((' revision %s\n') % v[1])
2364
2364
2365 @command('debugsuccessorssets',
2365 @command('debugsuccessorssets',
2366 [('', 'closest', False, _('return closest successors sets only'))],
2366 [('', 'closest', False, _('return closest successors sets only'))],
2367 _('[REV]'))
2367 _('[REV]'))
2368 def debugsuccessorssets(ui, repo, *revs, **opts):
2368 def debugsuccessorssets(ui, repo, *revs, **opts):
2369 """show set of successors for revision
2369 """show set of successors for revision
2370
2370
2371 A successors set of changeset A is a consistent group of revisions that
2371 A successors set of changeset A is a consistent group of revisions that
2372 succeed A. It contains non-obsolete changesets only unless closests
2372 succeed A. It contains non-obsolete changesets only unless closests
2373 successors set is set.
2373 successors set is set.
2374
2374
2375 In most cases a changeset A has a single successors set containing a single
2375 In most cases a changeset A has a single successors set containing a single
2376 successor (changeset A replaced by A').
2376 successor (changeset A replaced by A').
2377
2377
2378 A changeset that is made obsolete with no successors are called "pruned".
2378 A changeset that is made obsolete with no successors are called "pruned".
2379 Such changesets have no successors sets at all.
2379 Such changesets have no successors sets at all.
2380
2380
2381 A changeset that has been "split" will have a successors set containing
2381 A changeset that has been "split" will have a successors set containing
2382 more than one successor.
2382 more than one successor.
2383
2383
2384 A changeset that has been rewritten in multiple different ways is called
2384 A changeset that has been rewritten in multiple different ways is called
2385 "divergent". Such changesets have multiple successor sets (each of which
2385 "divergent". Such changesets have multiple successor sets (each of which
2386 may also be split, i.e. have multiple successors).
2386 may also be split, i.e. have multiple successors).
2387
2387
2388 Results are displayed as follows::
2388 Results are displayed as follows::
2389
2389
2390 <rev1>
2390 <rev1>
2391 <successors-1A>
2391 <successors-1A>
2392 <rev2>
2392 <rev2>
2393 <successors-2A>
2393 <successors-2A>
2394 <successors-2B1> <successors-2B2> <successors-2B3>
2394 <successors-2B1> <successors-2B2> <successors-2B3>
2395
2395
2396 Here rev2 has two possible (i.e. divergent) successors sets. The first
2396 Here rev2 has two possible (i.e. divergent) successors sets. The first
2397 holds one element, whereas the second holds three (i.e. the changeset has
2397 holds one element, whereas the second holds three (i.e. the changeset has
2398 been split).
2398 been split).
2399 """
2399 """
2400 # passed to successorssets caching computation from one call to another
2400 # passed to successorssets caching computation from one call to another
2401 cache = {}
2401 cache = {}
2402 ctx2str = bytes
2402 ctx2str = bytes
2403 node2str = short
2403 node2str = short
2404 for rev in scmutil.revrange(repo, revs):
2404 for rev in scmutil.revrange(repo, revs):
2405 ctx = repo[rev]
2405 ctx = repo[rev]
2406 ui.write('%s\n'% ctx2str(ctx))
2406 ui.write('%s\n'% ctx2str(ctx))
2407 for succsset in obsutil.successorssets(repo, ctx.node(),
2407 for succsset in obsutil.successorssets(repo, ctx.node(),
2408 closest=opts[r'closest'],
2408 closest=opts[r'closest'],
2409 cache=cache):
2409 cache=cache):
2410 if succsset:
2410 if succsset:
2411 ui.write(' ')
2411 ui.write(' ')
2412 ui.write(node2str(succsset[0]))
2412 ui.write(node2str(succsset[0]))
2413 for node in succsset[1:]:
2413 for node in succsset[1:]:
2414 ui.write(' ')
2414 ui.write(' ')
2415 ui.write(node2str(node))
2415 ui.write(node2str(node))
2416 ui.write('\n')
2416 ui.write('\n')
2417
2417
2418 @command('debugtemplate',
2418 @command('debugtemplate',
2419 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2419 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2420 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2420 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2421 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2421 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2422 optionalrepo=True)
2422 optionalrepo=True)
2423 def debugtemplate(ui, repo, tmpl, **opts):
2423 def debugtemplate(ui, repo, tmpl, **opts):
2424 """parse and apply a template
2424 """parse and apply a template
2425
2425
2426 If -r/--rev is given, the template is processed as a log template and
2426 If -r/--rev is given, the template is processed as a log template and
2427 applied to the given changesets. Otherwise, it is processed as a generic
2427 applied to the given changesets. Otherwise, it is processed as a generic
2428 template.
2428 template.
2429
2429
2430 Use --verbose to print the parsed tree.
2430 Use --verbose to print the parsed tree.
2431 """
2431 """
2432 revs = None
2432 revs = None
2433 if opts[r'rev']:
2433 if opts[r'rev']:
2434 if repo is None:
2434 if repo is None:
2435 raise error.RepoError(_('there is no Mercurial repository here '
2435 raise error.RepoError(_('there is no Mercurial repository here '
2436 '(.hg not found)'))
2436 '(.hg not found)'))
2437 revs = scmutil.revrange(repo, opts[r'rev'])
2437 revs = scmutil.revrange(repo, opts[r'rev'])
2438
2438
2439 props = {}
2439 props = {}
2440 for d in opts[r'define']:
2440 for d in opts[r'define']:
2441 try:
2441 try:
2442 k, v = (e.strip() for e in d.split('=', 1))
2442 k, v = (e.strip() for e in d.split('=', 1))
2443 if not k or k == 'ui':
2443 if not k or k == 'ui':
2444 raise ValueError
2444 raise ValueError
2445 props[k] = v
2445 props[k] = v
2446 except ValueError:
2446 except ValueError:
2447 raise error.Abort(_('malformed keyword definition: %s') % d)
2447 raise error.Abort(_('malformed keyword definition: %s') % d)
2448
2448
2449 if ui.verbose:
2449 if ui.verbose:
2450 aliases = ui.configitems('templatealias')
2450 aliases = ui.configitems('templatealias')
2451 tree = templater.parse(tmpl)
2451 tree = templater.parse(tmpl)
2452 ui.note(templater.prettyformat(tree), '\n')
2452 ui.note(templater.prettyformat(tree), '\n')
2453 newtree = templater.expandaliases(tree, aliases)
2453 newtree = templater.expandaliases(tree, aliases)
2454 if newtree != tree:
2454 if newtree != tree:
2455 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2455 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2456
2456
2457 if revs is None:
2457 if revs is None:
2458 tres = formatter.templateresources(ui, repo)
2458 tres = formatter.templateresources(ui, repo)
2459 t = formatter.maketemplater(ui, tmpl, resources=tres)
2459 t = formatter.maketemplater(ui, tmpl, resources=tres)
2460 ui.write(t.renderdefault(props))
2460 ui.write(t.renderdefault(props))
2461 else:
2461 else:
2462 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2462 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2463 for r in revs:
2463 for r in revs:
2464 displayer.show(repo[r], **pycompat.strkwargs(props))
2464 displayer.show(repo[r], **pycompat.strkwargs(props))
2465 displayer.close()
2465 displayer.close()
2466
2466
2467 @command('debuguigetpass', [
2467 @command('debuguigetpass', [
2468 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2468 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2469 ], _('[-p TEXT]'), norepo=True)
2469 ], _('[-p TEXT]'), norepo=True)
2470 def debuguigetpass(ui, prompt=''):
2470 def debuguigetpass(ui, prompt=''):
2471 """show prompt to type password"""
2471 """show prompt to type password"""
2472 r = ui.getpass(prompt)
2472 r = ui.getpass(prompt)
2473 ui.write(('respose: %s\n') % r)
2473 ui.write(('respose: %s\n') % r)
2474
2474
2475 @command('debuguiprompt', [
2475 @command('debuguiprompt', [
2476 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2476 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2477 ], _('[-p TEXT]'), norepo=True)
2477 ], _('[-p TEXT]'), norepo=True)
2478 def debuguiprompt(ui, prompt=''):
2478 def debuguiprompt(ui, prompt=''):
2479 """show plain prompt"""
2479 """show plain prompt"""
2480 r = ui.prompt(prompt)
2480 r = ui.prompt(prompt)
2481 ui.write(('response: %s\n') % r)
2481 ui.write(('response: %s\n') % r)
2482
2482
2483 @command('debugupdatecaches', [])
2483 @command('debugupdatecaches', [])
2484 def debugupdatecaches(ui, repo, *pats, **opts):
2484 def debugupdatecaches(ui, repo, *pats, **opts):
2485 """warm all known caches in the repository"""
2485 """warm all known caches in the repository"""
2486 with repo.wlock(), repo.lock():
2486 with repo.wlock(), repo.lock():
2487 repo.updatecaches(full=True)
2487 repo.updatecaches(full=True)
2488
2488
2489 @command('debugupgraderepo', [
2489 @command('debugupgraderepo', [
2490 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2490 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2491 ('', 'run', False, _('performs an upgrade')),
2491 ('', 'run', False, _('performs an upgrade')),
2492 ])
2492 ])
2493 def debugupgraderepo(ui, repo, run=False, optimize=None):
2493 def debugupgraderepo(ui, repo, run=False, optimize=None):
2494 """upgrade a repository to use different features
2494 """upgrade a repository to use different features
2495
2495
2496 If no arguments are specified, the repository is evaluated for upgrade
2496 If no arguments are specified, the repository is evaluated for upgrade
2497 and a list of problems and potential optimizations is printed.
2497 and a list of problems and potential optimizations is printed.
2498
2498
2499 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2499 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2500 can be influenced via additional arguments. More details will be provided
2500 can be influenced via additional arguments. More details will be provided
2501 by the command output when run without ``--run``.
2501 by the command output when run without ``--run``.
2502
2502
2503 During the upgrade, the repository will be locked and no writes will be
2503 During the upgrade, the repository will be locked and no writes will be
2504 allowed.
2504 allowed.
2505
2505
2506 At the end of the upgrade, the repository may not be readable while new
2506 At the end of the upgrade, the repository may not be readable while new
2507 repository data is swapped in. This window will be as long as it takes to
2507 repository data is swapped in. This window will be as long as it takes to
2508 rename some directories inside the ``.hg`` directory. On most machines, this
2508 rename some directories inside the ``.hg`` directory. On most machines, this
2509 should complete almost instantaneously and the chances of a consumer being
2509 should complete almost instantaneously and the chances of a consumer being
2510 unable to access the repository should be low.
2510 unable to access the repository should be low.
2511 """
2511 """
2512 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2512 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2513
2513
2514 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2514 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2515 inferrepo=True)
2515 inferrepo=True)
2516 def debugwalk(ui, repo, *pats, **opts):
2516 def debugwalk(ui, repo, *pats, **opts):
2517 """show how files match on given patterns"""
2517 """show how files match on given patterns"""
2518 opts = pycompat.byteskwargs(opts)
2518 opts = pycompat.byteskwargs(opts)
2519 m = scmutil.match(repo[None], pats, opts)
2519 m = scmutil.match(repo[None], pats, opts)
2520 ui.write(('matcher: %r\n' % m))
2520 ui.write(('matcher: %r\n' % m))
2521 items = list(repo[None].walk(m))
2521 items = list(repo[None].walk(m))
2522 if not items:
2522 if not items:
2523 return
2523 return
2524 f = lambda fn: fn
2524 f = lambda fn: fn
2525 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2525 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2526 f = lambda fn: util.normpath(fn)
2526 f = lambda fn: util.normpath(fn)
2527 fmt = 'f %%-%ds %%-%ds %%s' % (
2527 fmt = 'f %%-%ds %%-%ds %%s' % (
2528 max([len(abs) for abs in items]),
2528 max([len(abs) for abs in items]),
2529 max([len(m.rel(abs)) for abs in items]))
2529 max([len(m.rel(abs)) for abs in items]))
2530 for abs in items:
2530 for abs in items:
2531 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2531 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2532 ui.write("%s\n" % line.rstrip())
2532 ui.write("%s\n" % line.rstrip())
2533
2533
2534 @command('debugwhyunstable', [], _('REV'))
2534 @command('debugwhyunstable', [], _('REV'))
2535 def debugwhyunstable(ui, repo, rev):
2535 def debugwhyunstable(ui, repo, rev):
2536 """explain instabilities of a changeset"""
2536 """explain instabilities of a changeset"""
2537 for entry in obsutil.whyunstable(repo, repo[rev]):
2537 for entry in obsutil.whyunstable(repo, repo[rev]):
2538 dnodes = ''
2538 dnodes = ''
2539 if entry.get('divergentnodes'):
2539 if entry.get('divergentnodes'):
2540 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2540 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2541 for ctx in entry['divergentnodes']) + ' '
2541 for ctx in entry['divergentnodes']) + ' '
2542 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2542 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2543 entry['reason'], entry['node']))
2543 entry['reason'], entry['node']))
2544
2544
2545 @command('debugwireargs',
2545 @command('debugwireargs',
2546 [('', 'three', '', 'three'),
2546 [('', 'three', '', 'three'),
2547 ('', 'four', '', 'four'),
2547 ('', 'four', '', 'four'),
2548 ('', 'five', '', 'five'),
2548 ('', 'five', '', 'five'),
2549 ] + cmdutil.remoteopts,
2549 ] + cmdutil.remoteopts,
2550 _('REPO [OPTIONS]... [ONE [TWO]]'),
2550 _('REPO [OPTIONS]... [ONE [TWO]]'),
2551 norepo=True)
2551 norepo=True)
2552 def debugwireargs(ui, repopath, *vals, **opts):
2552 def debugwireargs(ui, repopath, *vals, **opts):
2553 opts = pycompat.byteskwargs(opts)
2553 opts = pycompat.byteskwargs(opts)
2554 repo = hg.peer(ui, opts, repopath)
2554 repo = hg.peer(ui, opts, repopath)
2555 for opt in cmdutil.remoteopts:
2555 for opt in cmdutil.remoteopts:
2556 del opts[opt[1]]
2556 del opts[opt[1]]
2557 args = {}
2557 args = {}
2558 for k, v in opts.iteritems():
2558 for k, v in opts.iteritems():
2559 if v:
2559 if v:
2560 args[k] = v
2560 args[k] = v
2561 args = pycompat.strkwargs(args)
2561 args = pycompat.strkwargs(args)
2562 # run twice to check that we don't mess up the stream for the next command
2562 # run twice to check that we don't mess up the stream for the next command
2563 res1 = repo.debugwireargs(*vals, **args)
2563 res1 = repo.debugwireargs(*vals, **args)
2564 res2 = repo.debugwireargs(*vals, **args)
2564 res2 = repo.debugwireargs(*vals, **args)
2565 ui.write("%s\n" % res1)
2565 ui.write("%s\n" % res1)
2566 if res1 != res2:
2566 if res1 != res2:
2567 ui.warn("%s\n" % res2)
2567 ui.warn("%s\n" % res2)
2568
2568
2569 def _parsewirelangblocks(fh):
2569 def _parsewirelangblocks(fh):
2570 activeaction = None
2570 activeaction = None
2571 blocklines = []
2571 blocklines = []
2572
2572
2573 for line in fh:
2573 for line in fh:
2574 line = line.rstrip()
2574 line = line.rstrip()
2575 if not line:
2575 if not line:
2576 continue
2576 continue
2577
2577
2578 if line.startswith(b'#'):
2578 if line.startswith(b'#'):
2579 continue
2579 continue
2580
2580
2581 if not line.startswith(' '):
2581 if not line.startswith(' '):
2582 # New block. Flush previous one.
2582 # New block. Flush previous one.
2583 if activeaction:
2583 if activeaction:
2584 yield activeaction, blocklines
2584 yield activeaction, blocklines
2585
2585
2586 activeaction = line
2586 activeaction = line
2587 blocklines = []
2587 blocklines = []
2588 continue
2588 continue
2589
2589
2590 # Else we start with an indent.
2590 # Else we start with an indent.
2591
2591
2592 if not activeaction:
2592 if not activeaction:
2593 raise error.Abort(_('indented line outside of block'))
2593 raise error.Abort(_('indented line outside of block'))
2594
2594
2595 blocklines.append(line)
2595 blocklines.append(line)
2596
2596
2597 # Flush last block.
2597 # Flush last block.
2598 if activeaction:
2598 if activeaction:
2599 yield activeaction, blocklines
2599 yield activeaction, blocklines
2600
2600
2601 @command('debugwireproto',
2601 @command('debugwireproto',
2602 [
2602 [
2603 ('', 'localssh', False, _('start an SSH server for this repo')),
2603 ('', 'localssh', False, _('start an SSH server for this repo')),
2604 ('', 'peer', '', _('construct a specific version of the peer')),
2604 ('', 'peer', '', _('construct a specific version of the peer')),
2605 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2605 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2606 ] + cmdutil.remoteopts,
2606 ] + cmdutil.remoteopts,
2607 _('[PATH]'),
2607 _('[PATH]'),
2608 optionalrepo=True)
2608 optionalrepo=True)
2609 def debugwireproto(ui, repo, path=None, **opts):
2609 def debugwireproto(ui, repo, path=None, **opts):
2610 """send wire protocol commands to a server
2610 """send wire protocol commands to a server
2611
2611
2612 This command can be used to issue wire protocol commands to remote
2612 This command can be used to issue wire protocol commands to remote
2613 peers and to debug the raw data being exchanged.
2613 peers and to debug the raw data being exchanged.
2614
2614
2615 ``--localssh`` will start an SSH server against the current repository
2615 ``--localssh`` will start an SSH server against the current repository
2616 and connect to that. By default, the connection will perform a handshake
2616 and connect to that. By default, the connection will perform a handshake
2617 and establish an appropriate peer instance.
2617 and establish an appropriate peer instance.
2618
2618
2619 ``--peer`` can be used to bypass the handshake protocol and construct a
2619 ``--peer`` can be used to bypass the handshake protocol and construct a
2620 peer instance using the specified class type. Valid values are ``raw``,
2620 peer instance using the specified class type. Valid values are ``raw``,
2621 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2621 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2622 payloads and don't support higher-level command actions.
2622 payloads and don't support higher-level command actions.
2623
2623
2624 ``--noreadstderr`` can be used to disable automatic reading from stderr
2624 ``--noreadstderr`` can be used to disable automatic reading from stderr
2625 of the peer (for SSH connections only). Disabling automatic reading of
2625 of the peer (for SSH connections only). Disabling automatic reading of
2626 stderr is useful for making output more deterministic.
2626 stderr is useful for making output more deterministic.
2627
2627
2628 Commands are issued via a mini language which is specified via stdin.
2628 Commands are issued via a mini language which is specified via stdin.
2629 The language consists of individual actions to perform. An action is
2629 The language consists of individual actions to perform. An action is
2630 defined by a block. A block is defined as a line with no leading
2630 defined by a block. A block is defined as a line with no leading
2631 space followed by 0 or more lines with leading space. Blocks are
2631 space followed by 0 or more lines with leading space. Blocks are
2632 effectively a high-level command with additional metadata.
2632 effectively a high-level command with additional metadata.
2633
2633
2634 Lines beginning with ``#`` are ignored.
2634 Lines beginning with ``#`` are ignored.
2635
2635
2636 The following sections denote available actions.
2636 The following sections denote available actions.
2637
2637
2638 raw
2638 raw
2639 ---
2639 ---
2640
2640
2641 Send raw data to the server.
2641 Send raw data to the server.
2642
2642
2643 The block payload contains the raw data to send as one atomic send
2643 The block payload contains the raw data to send as one atomic send
2644 operation. The data may not actually be delivered in a single system
2644 operation. The data may not actually be delivered in a single system
2645 call: it depends on the abilities of the transport being used.
2645 call: it depends on the abilities of the transport being used.
2646
2646
2647 Each line in the block is de-indented and concatenated. Then, that
2647 Each line in the block is de-indented and concatenated. Then, that
2648 value is evaluated as a Python b'' literal. This allows the use of
2648 value is evaluated as a Python b'' literal. This allows the use of
2649 backslash escaping, etc.
2649 backslash escaping, etc.
2650
2650
2651 raw+
2651 raw+
2652 ----
2652 ----
2653
2653
2654 Behaves like ``raw`` except flushes output afterwards.
2654 Behaves like ``raw`` except flushes output afterwards.
2655
2655
2656 command <X>
2656 command <X>
2657 -----------
2657 -----------
2658
2658
2659 Send a request to run a named command, whose name follows the ``command``
2659 Send a request to run a named command, whose name follows the ``command``
2660 string.
2660 string.
2661
2661
2662 Arguments to the command are defined as lines in this block. The format of
2662 Arguments to the command are defined as lines in this block. The format of
2663 each line is ``<key> <value>``. e.g.::
2663 each line is ``<key> <value>``. e.g.::
2664
2664
2665 command listkeys
2665 command listkeys
2666 namespace bookmarks
2666 namespace bookmarks
2667
2667
2668 Values are interpreted as Python b'' literals. This allows encoding
2668 Values are interpreted as Python b'' literals. This allows encoding
2669 special byte sequences via backslash escaping.
2669 special byte sequences via backslash escaping.
2670
2670
2671 The following arguments have special meaning:
2671 The following arguments have special meaning:
2672
2672
2673 ``PUSHFILE``
2673 ``PUSHFILE``
2674 When defined, the *push* mechanism of the peer will be used instead
2674 When defined, the *push* mechanism of the peer will be used instead
2675 of the static request-response mechanism and the content of the
2675 of the static request-response mechanism and the content of the
2676 file specified in the value of this argument will be sent as the
2676 file specified in the value of this argument will be sent as the
2677 command payload.
2677 command payload.
2678
2678
2679 This can be used to submit a local bundle file to the remote.
2679 This can be used to submit a local bundle file to the remote.
2680
2680
2681 batchbegin
2681 batchbegin
2682 ----------
2682 ----------
2683
2683
2684 Instruct the peer to begin a batched send.
2684 Instruct the peer to begin a batched send.
2685
2685
2686 All ``command`` blocks are queued for execution until the next
2686 All ``command`` blocks are queued for execution until the next
2687 ``batchsubmit`` block.
2687 ``batchsubmit`` block.
2688
2688
2689 batchsubmit
2689 batchsubmit
2690 -----------
2690 -----------
2691
2691
2692 Submit previously queued ``command`` blocks as a batch request.
2692 Submit previously queued ``command`` blocks as a batch request.
2693
2693
2694 This action MUST be paired with a ``batchbegin`` action.
2694 This action MUST be paired with a ``batchbegin`` action.
2695
2695
2696 httprequest <method> <path>
2696 httprequest <method> <path>
2697 ---------------------------
2697 ---------------------------
2698
2698
2699 (HTTP peer only)
2699 (HTTP peer only)
2700
2700
2701 Send an HTTP request to the peer.
2701 Send an HTTP request to the peer.
2702
2702
2703 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2703 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2704
2704
2705 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2705 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2706 headers to add to the request. e.g. ``Accept: foo``.
2706 headers to add to the request. e.g. ``Accept: foo``.
2707
2707
2708 The following arguments are special:
2708 The following arguments are special:
2709
2709
2710 ``BODYFILE``
2710 ``BODYFILE``
2711 The content of the file defined as the value to this argument will be
2711 The content of the file defined as the value to this argument will be
2712 transferred verbatim as the HTTP request body.
2712 transferred verbatim as the HTTP request body.
2713
2713
2714 close
2714 close
2715 -----
2715 -----
2716
2716
2717 Close the connection to the server.
2717 Close the connection to the server.
2718
2718
2719 flush
2719 flush
2720 -----
2720 -----
2721
2721
2722 Flush data written to the server.
2722 Flush data written to the server.
2723
2723
2724 readavailable
2724 readavailable
2725 -------------
2725 -------------
2726
2726
2727 Close the write end of the connection and read all available data from
2727 Close the write end of the connection and read all available data from
2728 the server.
2728 the server.
2729
2729
2730 If the connection to the server encompasses multiple pipes, we poll both
2730 If the connection to the server encompasses multiple pipes, we poll both
2731 pipes and read available data.
2731 pipes and read available data.
2732
2732
2733 readline
2733 readline
2734 --------
2734 --------
2735
2735
2736 Read a line of output from the server. If there are multiple output
2736 Read a line of output from the server. If there are multiple output
2737 pipes, reads only the main pipe.
2737 pipes, reads only the main pipe.
2738
2738
2739 ereadline
2739 ereadline
2740 ---------
2740 ---------
2741
2741
2742 Like ``readline``, but read from the stderr pipe, if available.
2742 Like ``readline``, but read from the stderr pipe, if available.
2743
2743
2744 read <X>
2744 read <X>
2745 --------
2745 --------
2746
2746
2747 ``read()`` N bytes from the server's main output pipe.
2747 ``read()`` N bytes from the server's main output pipe.
2748
2748
2749 eread <X>
2749 eread <X>
2750 ---------
2750 ---------
2751
2751
2752 ``read()`` N bytes from the server's stderr pipe, if available.
2752 ``read()`` N bytes from the server's stderr pipe, if available.
2753 """
2753 """
2754 opts = pycompat.byteskwargs(opts)
2754 opts = pycompat.byteskwargs(opts)
2755
2755
2756 if opts['localssh'] and not repo:
2756 if opts['localssh'] and not repo:
2757 raise error.Abort(_('--localssh requires a repository'))
2757 raise error.Abort(_('--localssh requires a repository'))
2758
2758
2759 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2759 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2760 raise error.Abort(_('invalid value for --peer'),
2760 raise error.Abort(_('invalid value for --peer'),
2761 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2761 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2762
2762
2763 if path and opts['localssh']:
2763 if path and opts['localssh']:
2764 raise error.Abort(_('cannot specify --localssh with an explicit '
2764 raise error.Abort(_('cannot specify --localssh with an explicit '
2765 'path'))
2765 'path'))
2766
2766
2767 if ui.interactive():
2767 if ui.interactive():
2768 ui.write(_('(waiting for commands on stdin)\n'))
2768 ui.write(_('(waiting for commands on stdin)\n'))
2769
2769
2770 blocks = list(_parsewirelangblocks(ui.fin))
2770 blocks = list(_parsewirelangblocks(ui.fin))
2771
2771
2772 proc = None
2772 proc = None
2773 stdin = None
2773 stdin = None
2774 stdout = None
2774 stdout = None
2775 stderr = None
2775 stderr = None
2776 opener = None
2776 opener = None
2777
2777
2778 if opts['localssh']:
2778 if opts['localssh']:
2779 # We start the SSH server in its own process so there is process
2779 # We start the SSH server in its own process so there is process
2780 # separation. This prevents a whole class of potential bugs around
2780 # separation. This prevents a whole class of potential bugs around
2781 # shared state from interfering with server operation.
2781 # shared state from interfering with server operation.
2782 args = util.hgcmd() + [
2782 args = util.hgcmd() + [
2783 '-R', repo.root,
2783 '-R', repo.root,
2784 'debugserve', '--sshstdio',
2784 'debugserve', '--sshstdio',
2785 ]
2785 ]
2786 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2786 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2787 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2787 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2788 bufsize=0)
2788 bufsize=0)
2789
2789
2790 stdin = proc.stdin
2790 stdin = proc.stdin
2791 stdout = proc.stdout
2791 stdout = proc.stdout
2792 stderr = proc.stderr
2792 stderr = proc.stderr
2793
2793
2794 # We turn the pipes into observers so we can log I/O.
2794 # We turn the pipes into observers so we can log I/O.
2795 if ui.verbose or opts['peer'] == 'raw':
2795 if ui.verbose or opts['peer'] == 'raw':
2796 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2796 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2797 logdata=True)
2797 logdata=True)
2798 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2798 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2799 logdata=True)
2799 logdata=True)
2800 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2800 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2801 logdata=True)
2801 logdata=True)
2802
2802
2803 # --localssh also implies the peer connection settings.
2803 # --localssh also implies the peer connection settings.
2804
2804
2805 url = 'ssh://localserver'
2805 url = 'ssh://localserver'
2806 autoreadstderr = not opts['noreadstderr']
2806 autoreadstderr = not opts['noreadstderr']
2807
2807
2808 if opts['peer'] == 'ssh1':
2808 if opts['peer'] == 'ssh1':
2809 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2809 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2810 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2810 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2811 None, autoreadstderr=autoreadstderr)
2811 None, autoreadstderr=autoreadstderr)
2812 elif opts['peer'] == 'ssh2':
2812 elif opts['peer'] == 'ssh2':
2813 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2813 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2814 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2814 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2815 None, autoreadstderr=autoreadstderr)
2815 None, autoreadstderr=autoreadstderr)
2816 elif opts['peer'] == 'raw':
2816 elif opts['peer'] == 'raw':
2817 ui.write(_('using raw connection to peer\n'))
2817 ui.write(_('using raw connection to peer\n'))
2818 peer = None
2818 peer = None
2819 else:
2819 else:
2820 ui.write(_('creating ssh peer from handshake results\n'))
2820 ui.write(_('creating ssh peer from handshake results\n'))
2821 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2821 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2822 autoreadstderr=autoreadstderr)
2822 autoreadstderr=autoreadstderr)
2823
2823
2824 elif path:
2824 elif path:
2825 # We bypass hg.peer() so we can proxy the sockets.
2825 # We bypass hg.peer() so we can proxy the sockets.
2826 # TODO consider not doing this because we skip
2826 # TODO consider not doing this because we skip
2827 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2827 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2828 u = util.url(path)
2828 u = util.url(path)
2829 if u.scheme != 'http':
2829 if u.scheme != 'http':
2830 raise error.Abort(_('only http:// paths are currently supported'))
2830 raise error.Abort(_('only http:// paths are currently supported'))
2831
2831
2832 url, authinfo = u.authinfo()
2832 url, authinfo = u.authinfo()
2833 openerargs = {}
2833 openerargs = {}
2834
2834
2835 # Turn pipes/sockets into observers so we can log I/O.
2835 # Turn pipes/sockets into observers so we can log I/O.
2836 if ui.verbose:
2836 if ui.verbose:
2837 openerargs = {
2837 openerargs = {
2838 r'loggingfh': ui,
2838 r'loggingfh': ui,
2839 r'loggingname': b's',
2839 r'loggingname': b's',
2840 r'loggingopts': {
2840 r'loggingopts': {
2841 r'logdata': True,
2841 r'logdata': True,
2842 r'logdataapis': False,
2842 r'logdataapis': False,
2843 },
2843 },
2844 }
2844 }
2845
2845
2846 if ui.debugflag:
2846 if ui.debugflag:
2847 openerargs[r'loggingopts'][r'logdataapis'] = True
2847 openerargs[r'loggingopts'][r'logdataapis'] = True
2848
2848
2849 # Don't send default headers when in raw mode. This allows us to
2850 # bypass most of the behavior of our URL handling code so we can
2851 # have near complete control over what's sent on the wire.
2852 if opts['peer'] == 'raw':
2853 openerargs[r'sendaccept'] = False
2854
2849 opener = urlmod.opener(ui, authinfo, **openerargs)
2855 opener = urlmod.opener(ui, authinfo, **openerargs)
2850
2856
2851 if opts['peer'] == 'raw':
2857 if opts['peer'] == 'raw':
2852 ui.write(_('using raw connection to peer\n'))
2858 ui.write(_('using raw connection to peer\n'))
2853 peer = None
2859 peer = None
2854 elif opts['peer']:
2860 elif opts['peer']:
2855 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2861 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2856 opts['peer'])
2862 opts['peer'])
2857 else:
2863 else:
2858 peer = httppeer.httppeer(ui, path, url, opener)
2864 peer = httppeer.httppeer(ui, path, url, opener)
2859 peer._fetchcaps()
2865 peer._fetchcaps()
2860
2866
2861 # We /could/ populate stdin/stdout with sock.makefile()...
2867 # We /could/ populate stdin/stdout with sock.makefile()...
2862 else:
2868 else:
2863 raise error.Abort(_('unsupported connection configuration'))
2869 raise error.Abort(_('unsupported connection configuration'))
2864
2870
2865 batchedcommands = None
2871 batchedcommands = None
2866
2872
2867 # Now perform actions based on the parsed wire language instructions.
2873 # Now perform actions based on the parsed wire language instructions.
2868 for action, lines in blocks:
2874 for action, lines in blocks:
2869 if action in ('raw', 'raw+'):
2875 if action in ('raw', 'raw+'):
2870 if not stdin:
2876 if not stdin:
2871 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2877 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2872
2878
2873 # Concatenate the data together.
2879 # Concatenate the data together.
2874 data = ''.join(l.lstrip() for l in lines)
2880 data = ''.join(l.lstrip() for l in lines)
2875 data = util.unescapestr(data)
2881 data = util.unescapestr(data)
2876 stdin.write(data)
2882 stdin.write(data)
2877
2883
2878 if action == 'raw+':
2884 if action == 'raw+':
2879 stdin.flush()
2885 stdin.flush()
2880 elif action == 'flush':
2886 elif action == 'flush':
2881 if not stdin:
2887 if not stdin:
2882 raise error.Abort(_('cannot call flush on this peer'))
2888 raise error.Abort(_('cannot call flush on this peer'))
2883 stdin.flush()
2889 stdin.flush()
2884 elif action.startswith('command'):
2890 elif action.startswith('command'):
2885 if not peer:
2891 if not peer:
2886 raise error.Abort(_('cannot send commands unless peer instance '
2892 raise error.Abort(_('cannot send commands unless peer instance '
2887 'is available'))
2893 'is available'))
2888
2894
2889 command = action.split(' ', 1)[1]
2895 command = action.split(' ', 1)[1]
2890
2896
2891 args = {}
2897 args = {}
2892 for line in lines:
2898 for line in lines:
2893 # We need to allow empty values.
2899 # We need to allow empty values.
2894 fields = line.lstrip().split(' ', 1)
2900 fields = line.lstrip().split(' ', 1)
2895 if len(fields) == 1:
2901 if len(fields) == 1:
2896 key = fields[0]
2902 key = fields[0]
2897 value = ''
2903 value = ''
2898 else:
2904 else:
2899 key, value = fields
2905 key, value = fields
2900
2906
2901 args[key] = util.unescapestr(value)
2907 args[key] = util.unescapestr(value)
2902
2908
2903 if batchedcommands is not None:
2909 if batchedcommands is not None:
2904 batchedcommands.append((command, args))
2910 batchedcommands.append((command, args))
2905 continue
2911 continue
2906
2912
2907 ui.status(_('sending %s command\n') % command)
2913 ui.status(_('sending %s command\n') % command)
2908
2914
2909 if 'PUSHFILE' in args:
2915 if 'PUSHFILE' in args:
2910 with open(args['PUSHFILE'], r'rb') as fh:
2916 with open(args['PUSHFILE'], r'rb') as fh:
2911 del args['PUSHFILE']
2917 del args['PUSHFILE']
2912 res, output = peer._callpush(command, fh,
2918 res, output = peer._callpush(command, fh,
2913 **pycompat.strkwargs(args))
2919 **pycompat.strkwargs(args))
2914 ui.status(_('result: %s\n') % util.escapedata(res))
2920 ui.status(_('result: %s\n') % util.escapedata(res))
2915 ui.status(_('remote output: %s\n') %
2921 ui.status(_('remote output: %s\n') %
2916 util.escapedata(output))
2922 util.escapedata(output))
2917 else:
2923 else:
2918 res = peer._call(command, **pycompat.strkwargs(args))
2924 res = peer._call(command, **pycompat.strkwargs(args))
2919 ui.status(_('response: %s\n') % util.escapedata(res))
2925 ui.status(_('response: %s\n') % util.escapedata(res))
2920
2926
2921 elif action == 'batchbegin':
2927 elif action == 'batchbegin':
2922 if batchedcommands is not None:
2928 if batchedcommands is not None:
2923 raise error.Abort(_('nested batchbegin not allowed'))
2929 raise error.Abort(_('nested batchbegin not allowed'))
2924
2930
2925 batchedcommands = []
2931 batchedcommands = []
2926 elif action == 'batchsubmit':
2932 elif action == 'batchsubmit':
2927 # There is a batching API we could go through. But it would be
2933 # There is a batching API we could go through. But it would be
2928 # difficult to normalize requests into function calls. It is easier
2934 # difficult to normalize requests into function calls. It is easier
2929 # to bypass this layer and normalize to commands + args.
2935 # to bypass this layer and normalize to commands + args.
2930 ui.status(_('sending batch with %d sub-commands\n') %
2936 ui.status(_('sending batch with %d sub-commands\n') %
2931 len(batchedcommands))
2937 len(batchedcommands))
2932 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2938 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2933 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2939 ui.status(_('response #%d: %s\n') % (i, util.escapedata(chunk)))
2934
2940
2935 batchedcommands = None
2941 batchedcommands = None
2936
2942
2937 elif action.startswith('httprequest '):
2943 elif action.startswith('httprequest '):
2938 if not opener:
2944 if not opener:
2939 raise error.Abort(_('cannot use httprequest without an HTTP '
2945 raise error.Abort(_('cannot use httprequest without an HTTP '
2940 'peer'))
2946 'peer'))
2941
2947
2942 request = action.split(' ', 2)
2948 request = action.split(' ', 2)
2943 if len(request) != 3:
2949 if len(request) != 3:
2944 raise error.Abort(_('invalid httprequest: expected format is '
2950 raise error.Abort(_('invalid httprequest: expected format is '
2945 '"httprequest <method> <path>'))
2951 '"httprequest <method> <path>'))
2946
2952
2947 method, httppath = request[1:]
2953 method, httppath = request[1:]
2948 headers = {}
2954 headers = {}
2949 body = None
2955 body = None
2950 for line in lines:
2956 for line in lines:
2951 line = line.lstrip()
2957 line = line.lstrip()
2952 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
2958 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
2953 if m:
2959 if m:
2954 headers[m.group(1)] = m.group(2)
2960 headers[m.group(1)] = m.group(2)
2955 continue
2961 continue
2956
2962
2957 if line.startswith(b'BODYFILE '):
2963 if line.startswith(b'BODYFILE '):
2958 with open(line.split(b' ', 1), 'rb') as fh:
2964 with open(line.split(b' ', 1), 'rb') as fh:
2959 body = fh.read()
2965 body = fh.read()
2960 else:
2966 else:
2961 raise error.Abort(_('unknown argument to httprequest: %s') %
2967 raise error.Abort(_('unknown argument to httprequest: %s') %
2962 line)
2968 line)
2963
2969
2964 url = path + httppath
2970 url = path + httppath
2965 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
2971 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
2966
2972
2967 try:
2973 try:
2968 opener.open(req).read()
2974 opener.open(req).read()
2969 except util.urlerr.urlerror as e:
2975 except util.urlerr.urlerror as e:
2970 e.read()
2976 e.read()
2971
2977
2972 elif action == 'close':
2978 elif action == 'close':
2973 peer.close()
2979 peer.close()
2974 elif action == 'readavailable':
2980 elif action == 'readavailable':
2975 if not stdout or not stderr:
2981 if not stdout or not stderr:
2976 raise error.Abort(_('readavailable not available on this peer'))
2982 raise error.Abort(_('readavailable not available on this peer'))
2977
2983
2978 stdin.close()
2984 stdin.close()
2979 stdout.read()
2985 stdout.read()
2980 stderr.read()
2986 stderr.read()
2981
2987
2982 elif action == 'readline':
2988 elif action == 'readline':
2983 if not stdout:
2989 if not stdout:
2984 raise error.Abort(_('readline not available on this peer'))
2990 raise error.Abort(_('readline not available on this peer'))
2985 stdout.readline()
2991 stdout.readline()
2986 elif action == 'ereadline':
2992 elif action == 'ereadline':
2987 if not stderr:
2993 if not stderr:
2988 raise error.Abort(_('ereadline not available on this peer'))
2994 raise error.Abort(_('ereadline not available on this peer'))
2989 stderr.readline()
2995 stderr.readline()
2990 elif action.startswith('read '):
2996 elif action.startswith('read '):
2991 count = int(action.split(' ', 1)[1])
2997 count = int(action.split(' ', 1)[1])
2992 if not stdout:
2998 if not stdout:
2993 raise error.Abort(_('read not available on this peer'))
2999 raise error.Abort(_('read not available on this peer'))
2994 stdout.read(count)
3000 stdout.read(count)
2995 elif action.startswith('eread '):
3001 elif action.startswith('eread '):
2996 count = int(action.split(' ', 1)[1])
3002 count = int(action.split(' ', 1)[1])
2997 if not stderr:
3003 if not stderr:
2998 raise error.Abort(_('eread not available on this peer'))
3004 raise error.Abort(_('eread not available on this peer'))
2999 stderr.read(count)
3005 stderr.read(count)
3000 else:
3006 else:
3001 raise error.Abort(_('unknown action: %s') % action)
3007 raise error.Abort(_('unknown action: %s') % action)
3002
3008
3003 if batchedcommands is not None:
3009 if batchedcommands is not None:
3004 raise error.Abort(_('unclosed "batchbegin" request'))
3010 raise error.Abort(_('unclosed "batchbegin" request'))
3005
3011
3006 if peer:
3012 if peer:
3007 peer.close()
3013 peer.close()
3008
3014
3009 if proc:
3015 if proc:
3010 proc.kill()
3016 proc.kill()
@@ -1,577 +1,582 b''
1 # url.py - HTTP handling for mercurial
1 # url.py - HTTP handling for mercurial
2 #
2 #
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import base64
12 import base64
13 import os
13 import os
14 import socket
14 import socket
15
15
16 from .i18n import _
16 from .i18n import _
17 from . import (
17 from . import (
18 encoding,
18 encoding,
19 error,
19 error,
20 httpconnection as httpconnectionmod,
20 httpconnection as httpconnectionmod,
21 keepalive,
21 keepalive,
22 pycompat,
22 pycompat,
23 sslutil,
23 sslutil,
24 urllibcompat,
24 urllibcompat,
25 util,
25 util,
26 )
26 )
27
27
28 httplib = util.httplib
28 httplib = util.httplib
29 stringio = util.stringio
29 stringio = util.stringio
30 urlerr = util.urlerr
30 urlerr = util.urlerr
31 urlreq = util.urlreq
31 urlreq = util.urlreq
32
32
def escape(s, quote=None):
    '''Replace special characters "&", "<" and ">" to HTML-safe sequences.
    If the optional flag quote is true, the quotation mark character (")
    is also translated.

    This is the same as cgi.escape in Python, but always operates on
    bytes, whereas cgi.escape in Python 3 only works on unicodes.
    '''
    # '&' must be translated first so the '&' introduced by later
    # entities is not escaped a second time.
    replacements = [(b"&", b"&amp;"),
                    (b"<", b"&lt;"),
                    (b">", b"&gt;")]
    if quote:
        replacements.append((b'"', b"&quot;"))
    for raw, entity in replacements:
        s = s.replace(raw, entity)
    return s
47
47
class passwordmgr(object):
    """Password manager combining the stored password database, [auth]
    configuration and interactive prompting."""

    def __init__(self, ui, passwddb):
        self.ui = ui
        self.passwddb = passwddb

    def add_password(self, realm, uri, user, passwd):
        return self.passwddb.add_password(realm, uri, user, passwd)

    def find_user_password(self, realm, authuri):
        user, passwd = self.passwddb.find_user_password(realm, authuri)
        if user and passwd:
            # Both pieces are already known; no lookup or prompting needed.
            self._writedebug(user, passwd)
            return (user, passwd)

        # Consult the [auth] configuration for the missing piece(s).
        res = httpconnectionmod.readauthforuri(self.ui, authuri, user)
        if res:
            group, auth = res
            user, passwd = auth.get('username'), auth.get('password')
            self.ui.debug("using auth.%s.* for authentication\n" % group)

        if not user or not passwd:
            # Still incomplete: fall back to asking the user.
            u = util.url(pycompat.bytesurl(authuri))
            u.query = None
            if not self.ui.interactive():
                raise error.Abort(_('http authorization required for %s') %
                                  util.hidepassword(bytes(u)))

            self.ui.write(_("http authorization required for %s\n") %
                          util.hidepassword(bytes(u)))
            self.ui.write(_("realm: %s\n") % pycompat.bytesurl(realm))
            if user:
                self.ui.write(_("user: %s\n") % user)
            else:
                user = self.ui.prompt(_("user:"), default=None)

            if not passwd:
                passwd = self.ui.getpass()

        # Remember the credentials for subsequent requests.
        self.passwddb.add_password(realm, authuri, user, passwd)
        self._writedebug(user, passwd)
        return (user, passwd)

    def _writedebug(self, user, passwd):
        # Never echo the actual password; mask it with asterisks.
        masked = passwd and '*' * len(passwd) or 'not set'
        self.ui.debug(_('http auth: user %s, password %s\n') % (user, masked))

    def find_stored_password(self, authuri):
        return self.passwddb.find_user_password(None, authuri)
97
97
class proxyhandler(urlreq.proxyhandler):
    """urllib proxy handler driven by the [http_proxy] configuration
    section and the http_proxy/no_proxy environment variables."""

    def __init__(self, ui):
        proxyurl = (ui.config("http_proxy", "host") or
                    encoding.environ.get('http_proxy'))
        # XXX proxyauthinfo = None

        if not proxyurl:
            proxies = {}
        else:
            # proxy can be proper url or host[:port]
            if not (proxyurl.startswith('http:') or
                    proxyurl.startswith('https:')):
                proxyurl = 'http://' + proxyurl + '/'
            proxy = util.url(proxyurl)
            if not proxy.user:
                proxy.user = ui.config("http_proxy", "user")
                proxy.passwd = ui.config("http_proxy", "passwd")

            # see if we should use a proxy for this url
            bypass = ["localhost", "127.0.0.1"]
            bypass.extend(p.lower()
                          for p in ui.configlist("http_proxy", "no"))
            bypass.extend(p.strip().lower()
                          for p in encoding.environ.get("no_proxy", '').split(',')
                          if p.strip())
            # "http_proxy.always" config is for running tests on localhost
            if ui.configbool("http_proxy", "always"):
                self.no_list = []
            else:
                self.no_list = bypass

            proxyurl = bytes(proxy)
            proxies = {'http': proxyurl, 'https': proxyurl}
            ui.debug('proxying through %s\n' % util.hidepassword(proxyurl))

        urlreq.proxyhandler.__init__(self, proxies)
        self.ui = ui

    def proxy_open(self, req, proxy, type_):
        host = urllibcompat.gethost(req).split(':')[0]
        for pattern in self.no_list:
            # exact host, '*.suffix' wildcard, or '.suffix' wildcard all
            # bypass the proxy
            if (host == pattern or
                (pattern.startswith('*.') and host.endswith(pattern[2:])) or
                (pattern.startswith('.') and host.endswith(pattern[1:]))):
                return None

        return urlreq.proxyhandler.proxy_open(self, req, proxy, type_)
147
147
def _gen_sendfile(orgsend):
    """Wrap a connection's send() so httpsendfile objects are streamed
    chunk by chunk instead of being handed over in one piece."""
    def _sendfile(self, data):
        if not isinstance(data, httpconnectionmod.httpsendfile):
            orgsend(self, data)
            return
        # if auth required, some data sent twice, so rewind here
        data.seek(0)
        for chunk in util.filechunkiter(data):
            orgsend(self, chunk)
    return _sendfile
159
159
# urlreq (urllib2/urllib.request) only exposes an HTTPS handler when
# Python was built with SSL support; remember whether it exists so the
# https connection/handler classes below can be defined conditionally.
has_https = util.safehasattr(urlreq, 'httpshandler')
161
161
class httpconnection(keepalive.HTTPConnection):
    """Keepalive HTTP connection that can stream large uploads and can
    hand back a response stashed by the CONNECT-proxy tunneling code."""

    # must be able to send big bundle as stream.
    send = _gen_sendfile(keepalive.HTTPConnection.send)

    def getresponse(self):
        proxyres = getattr(self, 'proxyres', None)
        if not proxyres:
            # normal case: no pending proxy error response
            return keepalive.HTTPConnection.getresponse(self)
        if proxyres.will_close:
            # the proxied response ends the connection; shut it down now
            self.close()
        self.proxyres = None
        return proxyres
174
174
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
    """Record CONNECT-tunnel state (realhostport/headers) on connection h."""
    tunnel_host = getattr(req, '_tunnel_host', None)
    new_tunnel = tunnel_host is not None and bool(tunnel_host)
    if new_tunnel:
        # normalize a bare host[:port] into an https URL
        if tunnel_host[:7] not in ['http://', 'https:/']:
            tunnel_host = 'https://' + tunnel_host
    else:
        tunnel_host = urllibcompat.getselector(req)

    if new_tunnel or tunnel_host == urllibcompat.getfullurl(req): # has proxy
        parsed = util.url(tunnel_host)
        if new_tunnel or parsed.scheme == 'https': # only use CONNECT for HTTPS
            h.realhostport = ':'.join([parsed.host, (parsed.port or '443')])
            h.headers = req.headers.copy()
            h.headers.update(handler.parent.addheaders)
            return

    # no tunneling needed for this request
    h.realhostport = None
    h.headers = None
197
197
def _generic_proxytunnel(self):
    """Establish a CONNECT tunnel through an HTTP proxy.

    Sends a CONNECT request for ``self.realhostport`` (forwarding any
    Proxy-* request headers) and parses the proxy's reply by hand.
    Returns True when the tunnel is established (a 200 reply).  On any
    other status the full error response is parsed, stashed on
    ``self.proxyres`` (consumed by httpconnection.getresponse()), and
    False is returned.
    """
    # only the Proxy-* headers are meant for the proxy itself
    proxyheaders = dict(
        [(x, self.headers[x]) for x in self.headers
         if x.lower().startswith('proxy-')])
    self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport)
    for header in proxyheaders.iteritems():
        self.send('%s: %s\r\n' % header)
    self.send('\r\n')

    # majority of the following code is duplicated from
    # httplib.HTTPConnection as there are no adequate places to
    # override functions to provide the needed functionality
    res = self.response_class(self.sock,
                              strict=self.strict,
                              method=self._method)

    while True:
        version, status, reason = res._read_status()
        if status != httplib.CONTINUE:
            break
        # skip lines that are all whitespace
        list(iter(lambda: res.fp.readline().strip(), ''))
    res.status = status
    res.reason = reason.strip()

    if res.status == 200:
        # skip lines until we find a blank line
        list(iter(res.fp.readline, '\r\n'))
        return True

    if version == 'HTTP/1.0':
        res.version = 10
    elif version.startswith('HTTP/1.'):
        res.version = 11
    elif version == 'HTTP/0.9':
        res.version = 9
    else:
        raise httplib.UnknownProtocol(version)

    if res.version == 9:
        # HTTP/0.9 carries no headers; the connection closes after the body
        res.length = None
        res.chunked = 0
        res.will_close = 1
        res.msg = httplib.HTTPMessage(stringio())
        return False

    res.msg = httplib.HTTPMessage(res.fp)
    res.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    trenc = res.msg.getheader('transfer-encoding')
    if trenc and trenc.lower() == "chunked":
        res.chunked = 1
        res.chunk_left = None
    else:
        res.chunked = 0

    # will the connection close at the end of the response?
    res.will_close = res._check_close()

    # do we have a Content-Length?
    # NOTE: RFC 2616, section 4.4, #3 says we ignore this if
    # transfer-encoding is "chunked"
    length = res.msg.getheader('content-length')
    if length and not res.chunked:
        try:
            res.length = int(length)
        except ValueError:
            res.length = None
        else:
            if res.length < 0: # ignore nonsensical negative lengths
                res.length = None
    else:
        res.length = None

    # does the body have a fixed length? (of zero)
    if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or
        100 <= status < 200 or # 1xx codes
        res._method == 'HEAD'):
        res.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if (not res.will_close and
        not res.chunked and
        res.length is None):
        res.will_close = 1

    # expose the (error) response so getresponse() can return it
    self.proxyres = res

    return False
290
290
class httphandler(keepalive.HTTPHandler):
    # urllib handler for plain http:// URLs, backed by hg's streaming
    # httpconnection class.
    def http_open(self, req):
        return self.do_open(httpconnection, req)

    def _start_transaction(self, h, req):
        # record CONNECT-proxy tunneling state on the connection before
        # delegating to the keepalive machinery
        _generic_start_transaction(self, h, req)
        return keepalive.HTTPHandler._start_transaction(self, h, req)
298
298
class logginghttpconnection(keepalive.HTTPConnection):
    # HTTPConnection variant whose socket factory is injected, so callers
    # (logginghttphandler) can substitute sockets wrapped in I/O loggers.
    def __init__(self, createconn, *args, **kwargs):
        keepalive.HTTPConnection.__init__(self, *args, **kwargs)
        # NOTE(review): the name matches httplib's internal connection
        # factory hook, so this presumably replaces
        # socket.create_connection() when connecting — confirm upstream.
        self._create_connection = createconn
303
303
class logginghttphandler(httphandler):
    """HTTP handler that logs socket I/O."""

    def __init__(self, logfh, name, observeropts):
        super(logginghttphandler, self).__init__()
        self._logfh = logfh
        self._logname = name
        self._observeropts = observeropts

    # do_open() calls the passed class to instantiate an HTTPConnection. We
    # pass in a callable method that creates a custom HTTPConnection instance
    # whose callback to create the socket knows how to proxy the socket.
    def http_open(self, req):
        return self.do_open(self._makeconnection, req)

    def _makeconnection(self, *args, **kwargs):
        def _loggedconnection(*cargs, **ckwargs):
            # wrap every freshly created socket in a logging observer
            rawsock = socket.create_connection(*cargs, **ckwargs)
            return util.makeloggingsocket(self._logfh, rawsock,
                                          self._logname,
                                          **self._observeropts)

        return logginghttpconnection(_loggedconnection, *args, **kwargs)
326
326
if has_https:
    class httpsconnection(httplib.HTTPConnection):
        """HTTPS connection with hg's cert validation, streaming uploads
        and CONNECT-proxy support.  ``self.ui`` is assigned externally by
        httpshandler._makeconnection before use."""
        response_class = keepalive.HTTPResponse
        default_port = httplib.HTTPS_PORT
        # must be able to send big bundle as stream.
        send = _gen_sendfile(keepalive.safesend)
        getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection)

        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     *args, **kwargs):
            httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs)
            self.key_file = key_file
            self.cert_file = cert_file

        def connect(self):
            self.sock = socket.create_connection((self.host, self.port))

            host = self.host
            if self.realhostport: # use CONNECT proxy
                # tunnel first, then validate the cert of the real host
                _generic_proxytunnel(self)
                host = self.realhostport.rsplit(':', 1)[0]
            self.sock = sslutil.wrapsocket(
                self.sock, self.key_file, self.cert_file, ui=self.ui,
                serverhostname=host)
            sslutil.validatesocket(self.sock)

    class httpshandler(keepalive.KeepAliveHandler, urlreq.httpshandler):
        """urllib handler for https:// URLs with keepalive and [auth]
        based key/cert selection."""
        def __init__(self, ui):
            keepalive.KeepAliveHandler.__init__(self)
            urlreq.httpshandler.__init__(self)
            self.ui = ui
            self.pwmgr = passwordmgr(self.ui,
                                     self.ui.httppasswordmgrdb)

        def _start_transaction(self, h, req):
            # record CONNECT-proxy tunneling state before the transaction
            _generic_start_transaction(self, h, req)
            return keepalive.KeepAliveHandler._start_transaction(self, h, req)

        def https_open(self, req):
            # urllibcompat.getfullurl() does not contain credentials
            # and we may need them to match the certificates.
            url = urllibcompat.getfullurl(req)
            user, password = self.pwmgr.find_stored_password(url)
            res = httpconnectionmod.readauthforuri(self.ui, url, user)
            if res:
                group, auth = res
                self.auth = auth
                self.ui.debug("using auth.%s.* for authentication\n" % group)
            else:
                self.auth = None
            return self.do_open(self._makeconnection, req)

        def _makeconnection(self, host, port=None, *args, **kwargs):
            keyfile = None
            certfile = None

            if len(args) >= 1: # key_file
                keyfile = args[0]
            if len(args) >= 2: # cert_file
                certfile = args[1]
            args = args[2:]

            # if the user has specified different key/cert files in
            # hgrc, we prefer these
            if self.auth and 'key' in self.auth and 'cert' in self.auth:
                keyfile = self.auth['key']
                certfile = self.auth['cert']

            conn = httpsconnection(host, port, keyfile, certfile, *args,
                                   **kwargs)
            # httpsconnection.connect() needs ui for cert validation
            conn.ui = self.ui
            return conn
399
399
400 class httpdigestauthhandler(urlreq.httpdigestauthhandler):
400 class httpdigestauthhandler(urlreq.httpdigestauthhandler):
401 def __init__(self, *args, **kwargs):
401 def __init__(self, *args, **kwargs):
402 urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
402 urlreq.httpdigestauthhandler.__init__(self, *args, **kwargs)
403 self.retried_req = None
403 self.retried_req = None
404
404
405 def reset_retry_count(self):
405 def reset_retry_count(self):
406 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
406 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
407 # forever. We disable reset_retry_count completely and reset in
407 # forever. We disable reset_retry_count completely and reset in
408 # http_error_auth_reqed instead.
408 # http_error_auth_reqed instead.
409 pass
409 pass
410
410
411 def http_error_auth_reqed(self, auth_header, host, req, headers):
411 def http_error_auth_reqed(self, auth_header, host, req, headers):
412 # Reset the retry counter once for each request.
412 # Reset the retry counter once for each request.
413 if req is not self.retried_req:
413 if req is not self.retried_req:
414 self.retried_req = req
414 self.retried_req = req
415 self.retried = 0
415 self.retried = 0
416 return urlreq.httpdigestauthhandler.http_error_auth_reqed(
416 return urlreq.httpdigestauthhandler.http_error_auth_reqed(
417 self, auth_header, host, req, headers)
417 self, auth_header, host, req, headers)
418
418
419 class httpbasicauthhandler(urlreq.httpbasicauthhandler):
419 class httpbasicauthhandler(urlreq.httpbasicauthhandler):
420 def __init__(self, *args, **kwargs):
420 def __init__(self, *args, **kwargs):
421 self.auth = None
421 self.auth = None
422 urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs)
422 urlreq.httpbasicauthhandler.__init__(self, *args, **kwargs)
423 self.retried_req = None
423 self.retried_req = None
424
424
425 def http_request(self, request):
425 def http_request(self, request):
426 if self.auth:
426 if self.auth:
427 request.add_unredirected_header(self.auth_header, self.auth)
427 request.add_unredirected_header(self.auth_header, self.auth)
428
428
429 return request
429 return request
430
430
431 def https_request(self, request):
431 def https_request(self, request):
432 if self.auth:
432 if self.auth:
433 request.add_unredirected_header(self.auth_header, self.auth)
433 request.add_unredirected_header(self.auth_header, self.auth)
434
434
435 return request
435 return request
436
436
437 def reset_retry_count(self):
437 def reset_retry_count(self):
438 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
438 # Python 2.6.5 will call this on 401 or 407 errors and thus loop
439 # forever. We disable reset_retry_count completely and reset in
439 # forever. We disable reset_retry_count completely and reset in
440 # http_error_auth_reqed instead.
440 # http_error_auth_reqed instead.
441 pass
441 pass
442
442
443 def http_error_auth_reqed(self, auth_header, host, req, headers):
443 def http_error_auth_reqed(self, auth_header, host, req, headers):
444 # Reset the retry counter once for each request.
444 # Reset the retry counter once for each request.
445 if req is not self.retried_req:
445 if req is not self.retried_req:
446 self.retried_req = req
446 self.retried_req = req
447 self.retried = 0
447 self.retried = 0
448 return urlreq.httpbasicauthhandler.http_error_auth_reqed(
448 return urlreq.httpbasicauthhandler.http_error_auth_reqed(
449 self, auth_header, host, req, headers)
449 self, auth_header, host, req, headers)
450
450
451 def retry_http_basic_auth(self, host, req, realm):
451 def retry_http_basic_auth(self, host, req, realm):
452 user, pw = self.passwd.find_user_password(
452 user, pw = self.passwd.find_user_password(
453 realm, urllibcompat.getfullurl(req))
453 realm, urllibcompat.getfullurl(req))
454 if pw is not None:
454 if pw is not None:
455 raw = "%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
455 raw = "%s:%s" % (pycompat.bytesurl(user), pycompat.bytesurl(pw))
456 auth = r'Basic %s' % pycompat.strurl(base64.b64encode(raw).strip())
456 auth = r'Basic %s' % pycompat.strurl(base64.b64encode(raw).strip())
457 if req.get_header(self.auth_header, None) == auth:
457 if req.get_header(self.auth_header, None) == auth:
458 return None
458 return None
459 self.auth = auth
459 self.auth = auth
460 req.add_unredirected_header(self.auth_header, auth)
460 req.add_unredirected_header(self.auth_header, auth)
461 return self.parent.open(req)
461 return self.parent.open(req)
462 else:
462 else:
463 return None
463 return None
464
464
465 class cookiehandler(urlreq.basehandler):
465 class cookiehandler(urlreq.basehandler):
466 def __init__(self, ui):
466 def __init__(self, ui):
467 self.cookiejar = None
467 self.cookiejar = None
468
468
469 cookiefile = ui.config('auth', 'cookiefile')
469 cookiefile = ui.config('auth', 'cookiefile')
470 if not cookiefile:
470 if not cookiefile:
471 return
471 return
472
472
473 cookiefile = util.expandpath(cookiefile)
473 cookiefile = util.expandpath(cookiefile)
474 try:
474 try:
475 cookiejar = util.cookielib.MozillaCookieJar(cookiefile)
475 cookiejar = util.cookielib.MozillaCookieJar(cookiefile)
476 cookiejar.load()
476 cookiejar.load()
477 self.cookiejar = cookiejar
477 self.cookiejar = cookiejar
478 except util.cookielib.LoadError as e:
478 except util.cookielib.LoadError as e:
479 ui.warn(_('(error loading cookie file %s: %s; continuing without '
479 ui.warn(_('(error loading cookie file %s: %s; continuing without '
480 'cookies)\n') % (cookiefile, util.forcebytestr(e)))
480 'cookies)\n') % (cookiefile, util.forcebytestr(e)))
481
481
482 def http_request(self, request):
482 def http_request(self, request):
483 if self.cookiejar:
483 if self.cookiejar:
484 self.cookiejar.add_cookie_header(request)
484 self.cookiejar.add_cookie_header(request)
485
485
486 return request
486 return request
487
487
488 def https_request(self, request):
488 def https_request(self, request):
489 if self.cookiejar:
489 if self.cookiejar:
490 self.cookiejar.add_cookie_header(request)
490 self.cookiejar.add_cookie_header(request)
491
491
492 return request
492 return request
493
493
494 handlerfuncs = []
494 handlerfuncs = []
495
495
496 def opener(ui, authinfo=None, useragent=None, loggingfh=None,
496 def opener(ui, authinfo=None, useragent=None, loggingfh=None,
497 loggingname=b's', loggingopts=None):
497 loggingname=b's', loggingopts=None, sendaccept=True):
498 '''
498 '''
499 construct an opener suitable for urllib2
499 construct an opener suitable for urllib2
500 authinfo will be added to the password manager
500 authinfo will be added to the password manager
501
501
502 The opener can be configured to log socket events if the various
502 The opener can be configured to log socket events if the various
503 ``logging*`` arguments are specified.
503 ``logging*`` arguments are specified.
504
504
505 ``loggingfh`` denotes a file object to log events to.
505 ``loggingfh`` denotes a file object to log events to.
506 ``loggingname`` denotes the name of the to print when logging.
506 ``loggingname`` denotes the name of the to print when logging.
507 ``loggingopts`` is a dict of keyword arguments to pass to the constructed
507 ``loggingopts`` is a dict of keyword arguments to pass to the constructed
508 ``util.socketobserver`` instance.
508 ``util.socketobserver`` instance.
509
510 ``sendaccept`` allows controlling whether the ``Accept`` request header
511 is sent. The header is sent by default.
509 '''
512 '''
510 handlers = []
513 handlers = []
511
514
512 if loggingfh:
515 if loggingfh:
513 handlers.append(logginghttphandler(loggingfh, loggingname,
516 handlers.append(logginghttphandler(loggingfh, loggingname,
514 loggingopts or {}))
517 loggingopts or {}))
515 # We don't yet support HTTPS when logging I/O. If we attempt to open
518 # We don't yet support HTTPS when logging I/O. If we attempt to open
516 # an HTTPS URL, we'll likely fail due to unknown protocol.
519 # an HTTPS URL, we'll likely fail due to unknown protocol.
517
520
518 else:
521 else:
519 handlers.append(httphandler())
522 handlers.append(httphandler())
520 if has_https:
523 if has_https:
521 handlers.append(httpshandler(ui))
524 handlers.append(httpshandler(ui))
522
525
523 handlers.append(proxyhandler(ui))
526 handlers.append(proxyhandler(ui))
524
527
525 passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
528 passmgr = passwordmgr(ui, ui.httppasswordmgrdb)
526 if authinfo is not None:
529 if authinfo is not None:
527 realm, uris, user, passwd = authinfo
530 realm, uris, user, passwd = authinfo
528 saveduser, savedpass = passmgr.find_stored_password(uris[0])
531 saveduser, savedpass = passmgr.find_stored_password(uris[0])
529 if user != saveduser or passwd:
532 if user != saveduser or passwd:
530 passmgr.add_password(realm, uris, user, passwd)
533 passmgr.add_password(realm, uris, user, passwd)
531 ui.debug('http auth: user %s, password %s\n' %
534 ui.debug('http auth: user %s, password %s\n' %
532 (user, passwd and '*' * len(passwd) or 'not set'))
535 (user, passwd and '*' * len(passwd) or 'not set'))
533
536
534 handlers.extend((httpbasicauthhandler(passmgr),
537 handlers.extend((httpbasicauthhandler(passmgr),
535 httpdigestauthhandler(passmgr)))
538 httpdigestauthhandler(passmgr)))
536 handlers.extend([h(ui, passmgr) for h in handlerfuncs])
539 handlers.extend([h(ui, passmgr) for h in handlerfuncs])
537 handlers.append(cookiehandler(ui))
540 handlers.append(cookiehandler(ui))
538 opener = urlreq.buildopener(*handlers)
541 opener = urlreq.buildopener(*handlers)
539
542
540 # The user agent should should *NOT* be used by servers for e.g.
543 # The user agent should should *NOT* be used by servers for e.g.
541 # protocol detection or feature negotiation: there are other
544 # protocol detection or feature negotiation: there are other
542 # facilities for that.
545 # facilities for that.
543 #
546 #
544 # "mercurial/proto-1.0" was the original user agent string and
547 # "mercurial/proto-1.0" was the original user agent string and
545 # exists for backwards compatibility reasons.
548 # exists for backwards compatibility reasons.
546 #
549 #
547 # The "(Mercurial %s)" string contains the distribution
550 # The "(Mercurial %s)" string contains the distribution
548 # name and version. Other client implementations should choose their
551 # name and version. Other client implementations should choose their
549 # own distribution name. Since servers should not be using the user
552 # own distribution name. Since servers should not be using the user
550 # agent string for anything, clients should be able to define whatever
553 # agent string for anything, clients should be able to define whatever
551 # user agent they deem appropriate.
554 # user agent they deem appropriate.
552 #
555 #
553 # The custom user agent is for lfs, because unfortunately some servers
556 # The custom user agent is for lfs, because unfortunately some servers
554 # do look at this value.
557 # do look at this value.
555 if not useragent:
558 if not useragent:
556 agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
559 agent = 'mercurial/proto-1.0 (Mercurial %s)' % util.version()
557 opener.addheaders = [(r'User-agent', pycompat.sysstr(agent))]
560 opener.addheaders = [(r'User-agent', pycompat.sysstr(agent))]
558 else:
561 else:
559 opener.addheaders = [(r'User-agent', pycompat.sysstr(useragent))]
562 opener.addheaders = [(r'User-agent', pycompat.sysstr(useragent))]
560
563
561 # This header should only be needed by wire protocol requests. But it has
564 # This header should only be needed by wire protocol requests. But it has
562 # been sent on all requests since forever. We keep sending it for backwards
565 # been sent on all requests since forever. We keep sending it for backwards
563 # compatibility reasons. Modern versions of the wire protocol use
566 # compatibility reasons. Modern versions of the wire protocol use
564 # X-HgProto-<N> for advertising client support.
567 # X-HgProto-<N> for advertising client support.
565 opener.addheaders.append((r'Accept', r'application/mercurial-0.1'))
568 if sendaccept:
569 opener.addheaders.append((r'Accept', r'application/mercurial-0.1'))
570
566 return opener
571 return opener
567
572
568 def open(ui, url_, data=None):
573 def open(ui, url_, data=None):
569 u = util.url(url_)
574 u = util.url(url_)
570 if u.scheme:
575 if u.scheme:
571 u.scheme = u.scheme.lower()
576 u.scheme = u.scheme.lower()
572 url_, authinfo = u.authinfo()
577 url_, authinfo = u.authinfo()
573 else:
578 else:
574 path = util.normpath(os.path.abspath(url_))
579 path = util.normpath(os.path.abspath(url_))
575 url_ = 'file://' + urlreq.pathname2url(path)
580 url_ = 'file://' + urlreq.pathname2url(path)
576 authinfo = None
581 authinfo = None
577 return opener(ui, authinfo).open(pycompat.strurl(url_), data)
582 return opener(ui, authinfo).open(pycompat.strurl(url_), data)
@@ -1,241 +1,239 b''
1 $ cat >> $HGRCPATH << EOF
1 $ cat >> $HGRCPATH << EOF
2 > [web]
2 > [web]
3 > push_ssl = false
3 > push_ssl = false
4 > allow_push = *
4 > allow_push = *
5 > EOF
5 > EOF
6
6
7 $ hg init server
7 $ hg init server
8 $ cd server
8 $ cd server
9 $ touch a
9 $ touch a
10 $ hg -q commit -A -m initial
10 $ hg -q commit -A -m initial
11 $ cd ..
11 $ cd ..
12
12
13 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
13 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
14 $ cat hg.pid >> $DAEMON_PIDS
14 $ cat hg.pid >> $DAEMON_PIDS
15
15
16 compression formats are advertised in compression capability
16 compression formats are advertised in compression capability
17
17
18 #if zstd
18 #if zstd
19 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zstd,zlib$' > /dev/null
19 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zstd,zlib$' > /dev/null
20 #else
20 #else
21 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zlib$' > /dev/null
21 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=zlib$' > /dev/null
22 #endif
22 #endif
23
23
24 $ killdaemons.py
24 $ killdaemons.py
25
25
26 server.compressionengines can replace engines list wholesale
26 server.compressionengines can replace engines list wholesale
27
27
28 $ hg serve --config server.compressionengines=none -R server -p $HGPORT -d --pid-file hg.pid
28 $ hg serve --config server.compressionengines=none -R server -p $HGPORT -d --pid-file hg.pid
29 $ cat hg.pid > $DAEMON_PIDS
29 $ cat hg.pid > $DAEMON_PIDS
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none$' > /dev/null
30 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none$' > /dev/null
31
31
32 $ killdaemons.py
32 $ killdaemons.py
33
33
34 Order of engines can also change
34 Order of engines can also change
35
35
36 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
36 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
37 $ cat hg.pid > $DAEMON_PIDS
37 $ cat hg.pid > $DAEMON_PIDS
38 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none,zlib$' > /dev/null
38 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=capabilities' | tr ' ' '\n' | grep '^compression=none,zlib$' > /dev/null
39
39
40 $ killdaemons.py
40 $ killdaemons.py
41
41
42 Start a default server again
42 Start a default server again
43
43
44 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
44 $ hg serve -R server -p $HGPORT -d --pid-file hg.pid
45 $ cat hg.pid > $DAEMON_PIDS
45 $ cat hg.pid > $DAEMON_PIDS
46
46
47 Server should send application/mercurial-0.1 to clients if no Accept is used
47 Server should send application/mercurial-0.1 to clients if no Accept is used
48
48
49 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
49 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
50 200 Script output follows
50 200 Script output follows
51 content-type: application/mercurial-0.1
51 content-type: application/mercurial-0.1
52 date: $HTTP_DATE$
52 date: $HTTP_DATE$
53 server: testing stub value
53 server: testing stub value
54 transfer-encoding: chunked
54 transfer-encoding: chunked
55
55
56 Server should send application/mercurial-0.1 when client says it wants it
56 Server should send application/mercurial-0.1 when client says it wants it
57
57
58 $ get-with-headers.py --hgproto '0.1' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
58 $ get-with-headers.py --hgproto '0.1' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
59 200 Script output follows
59 200 Script output follows
60 content-type: application/mercurial-0.1
60 content-type: application/mercurial-0.1
61 date: $HTTP_DATE$
61 date: $HTTP_DATE$
62 server: testing stub value
62 server: testing stub value
63 transfer-encoding: chunked
63 transfer-encoding: chunked
64
64
65 Server should send application/mercurial-0.2 when client says it wants it
65 Server should send application/mercurial-0.2 when client says it wants it
66
66
67 $ get-with-headers.py --hgproto '0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
67 $ get-with-headers.py --hgproto '0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
68 200 Script output follows
68 200 Script output follows
69 content-type: application/mercurial-0.2
69 content-type: application/mercurial-0.2
70 date: $HTTP_DATE$
70 date: $HTTP_DATE$
71 server: testing stub value
71 server: testing stub value
72 transfer-encoding: chunked
72 transfer-encoding: chunked
73
73
74 $ get-with-headers.py --hgproto '0.1 0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
74 $ get-with-headers.py --hgproto '0.1 0.2' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
75 200 Script output follows
75 200 Script output follows
76 content-type: application/mercurial-0.2
76 content-type: application/mercurial-0.2
77 date: $HTTP_DATE$
77 date: $HTTP_DATE$
78 server: testing stub value
78 server: testing stub value
79 transfer-encoding: chunked
79 transfer-encoding: chunked
80
80
81 Requesting a compression format that server doesn't support results will fall back to 0.1
81 Requesting a compression format that server doesn't support results will fall back to 0.1
82
82
83 $ get-with-headers.py --hgproto '0.2 comp=aa' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
83 $ get-with-headers.py --hgproto '0.2 comp=aa' --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' -
84 200 Script output follows
84 200 Script output follows
85 content-type: application/mercurial-0.1
85 content-type: application/mercurial-0.1
86 date: $HTTP_DATE$
86 date: $HTTP_DATE$
87 server: testing stub value
87 server: testing stub value
88 transfer-encoding: chunked
88 transfer-encoding: chunked
89
89
90 #if zstd
90 #if zstd
91 zstd is used if available
91 zstd is used if available
92
92
93 $ get-with-headers.py --hgproto '0.2 comp=zstd' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
93 $ get-with-headers.py --hgproto '0.2 comp=zstd' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
94 $ f --size --hexdump --bytes 36 --sha1 resp
94 $ f --size --hexdump --bytes 36 --sha1 resp
95 resp: size=248, sha1=4d8d8f87fb82bd542ce52881fdc94f850748
95 resp: size=248, sha1=4d8d8f87fb82bd542ce52881fdc94f850748
96 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
96 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
97 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 73 74 64 |t follows...zstd|
97 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 73 74 64 |t follows...zstd|
98 0020: 28 b5 2f fd |(./.|
98 0020: 28 b5 2f fd |(./.|
99
99
100 #endif
100 #endif
101
101
102 application/mercurial-0.2 is not yet used on non-streaming responses
102 application/mercurial-0.2 is not yet used on non-streaming responses
103
103
104 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=heads' -
104 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=heads' -
105 200 Script output follows
105 200 Script output follows
106 content-length: 41
106 content-length: 41
107 content-type: application/mercurial-0.1
107 content-type: application/mercurial-0.1
108 date: $HTTP_DATE$
108 date: $HTTP_DATE$
109 server: testing stub value
109 server: testing stub value
110
110
111 e93700bd72895c5addab234c56d4024b487a362f
111 e93700bd72895c5addab234c56d4024b487a362f
112
112
113 Now test protocol preference usage
113 Now test protocol preference usage
114
114
115 $ killdaemons.py
115 $ killdaemons.py
116 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
116 $ hg serve --config server.compressionengines=none,zlib -R server -p $HGPORT -d --pid-file hg.pid
117 $ cat hg.pid > $DAEMON_PIDS
117 $ cat hg.pid > $DAEMON_PIDS
118
118
119 No Accept will send 0.1+zlib, even though "none" is preferred b/c "none" isn't supported on 0.1
119 No Accept will send 0.1+zlib, even though "none" is preferred b/c "none" isn't supported on 0.1
120
120
121 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' Content-Type
121 $ get-with-headers.py --headeronly $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' Content-Type
122 200 Script output follows
122 200 Script output follows
123 content-type: application/mercurial-0.1
123 content-type: application/mercurial-0.1
124
124
125 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
125 $ get-with-headers.py $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
126 $ f --size --hexdump --bytes 28 --sha1 resp
126 $ f --size --hexdump --bytes 28 --sha1 resp
127 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
127 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
128 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
128 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
129 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
129 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
130
130
131 Explicit 0.1 will send zlib because "none" isn't supported on 0.1
131 Explicit 0.1 will send zlib because "none" isn't supported on 0.1
132
132
133 $ get-with-headers.py --hgproto '0.1' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
133 $ get-with-headers.py --hgproto '0.1' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
134 $ f --size --hexdump --bytes 28 --sha1 resp
134 $ f --size --hexdump --bytes 28 --sha1 resp
135 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
135 resp: size=227, sha1=35a4c074da74f32f5440da3cbf04
136 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
136 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
137 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
137 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 78 |t follows..x|
138
138
139 0.2 with no compression will get "none" because that is server's preference
139 0.2 with no compression will get "none" because that is server's preference
140 (spec says ZL and UN are implicitly supported)
140 (spec says ZL and UN are implicitly supported)
141
141
142 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
142 $ get-with-headers.py --hgproto '0.2' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
143 $ f --size --hexdump --bytes 32 --sha1 resp
143 $ f --size --hexdump --bytes 32 --sha1 resp
144 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
144 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
145 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
145 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
146 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
146 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
147
147
148 Client receives server preference even if local order doesn't match
148 Client receives server preference even if local order doesn't match
149
149
150 $ get-with-headers.py --hgproto '0.2 comp=zlib,none' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
150 $ get-with-headers.py --hgproto '0.2 comp=zlib,none' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
151 $ f --size --hexdump --bytes 32 --sha1 resp
151 $ f --size --hexdump --bytes 32 --sha1 resp
152 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
152 resp: size=432, sha1=ac931b412ec185a02e0e5bcff98dac83
153 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
153 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
154 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
154 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 6e 6f 6e 65 |t follows...none|
155
155
156 Client receives only supported format even if not server preferred format
156 Client receives only supported format even if not server preferred format
157
157
158 $ get-with-headers.py --hgproto '0.2 comp=zlib' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
158 $ get-with-headers.py --hgproto '0.2 comp=zlib' $LOCALIP:$HGPORT '?cmd=getbundle&heads=e93700bd72895c5addab234c56d4024b487a362f&common=0000000000000000000000000000000000000000' > resp
159 $ f --size --hexdump --bytes 33 --sha1 resp
159 $ f --size --hexdump --bytes 33 --sha1 resp
160 resp: size=232, sha1=a1c727f0c9693ca15742a75c30419bc36
160 resp: size=232, sha1=a1c727f0c9693ca15742a75c30419bc36
161 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
161 0000: 32 30 30 20 53 63 72 69 70 74 20 6f 75 74 70 75 |200 Script outpu|
162 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 6c 69 62 |t follows...zlib|
162 0010: 74 20 66 6f 6c 6c 6f 77 73 0a 0a 04 7a 6c 69 62 |t follows...zlib|
163 0020: 78 |x|
163 0020: 78 |x|
164
164
165 $ killdaemons.py
165 $ killdaemons.py
166 $ cd ..
166 $ cd ..
167
167
168 Test listkeys for listing namespaces
168 Test listkeys for listing namespaces
169
169
170 $ hg init empty
170 $ hg init empty
171 $ hg -R empty serve -p $HGPORT -d --pid-file hg.pid
171 $ hg -R empty serve -p $HGPORT -d --pid-file hg.pid
172 $ cat hg.pid > $DAEMON_PIDS
172 $ cat hg.pid > $DAEMON_PIDS
173
173
174 $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF
174 $ hg --verbose debugwireproto http://$LOCALIP:$HGPORT << EOF
175 > command listkeys
175 > command listkeys
176 > namespace namespaces
176 > namespace namespaces
177 > EOF
177 > EOF
178 s> GET /?cmd=capabilities HTTP/1.1\r\n
178 s> GET /?cmd=capabilities HTTP/1.1\r\n
179 s> Accept-Encoding: identity\r\n
179 s> Accept-Encoding: identity\r\n
180 s> accept: application/mercurial-0.1\r\n
180 s> accept: application/mercurial-0.1\r\n
181 s> host: $LOCALIP:$HGPORT\r\n (glob)
181 s> host: $LOCALIP:$HGPORT\r\n (glob)
182 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
182 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
183 s> \r\n
183 s> \r\n
184 s> makefile('rb', None)
184 s> makefile('rb', None)
185 s> HTTP/1.1 200 Script output follows\r\n
185 s> HTTP/1.1 200 Script output follows\r\n
186 s> Server: testing stub value\r\n
186 s> Server: testing stub value\r\n
187 s> Date: $HTTP_DATE$\r\n
187 s> Date: $HTTP_DATE$\r\n
188 s> Content-Type: application/mercurial-0.1\r\n
188 s> Content-Type: application/mercurial-0.1\r\n
189 s> Content-Length: *\r\n (glob)
189 s> Content-Length: *\r\n (glob)
190 s> \r\n
190 s> \r\n
191 s> lookup branchmap pushkey known getbundle unbundlehash batch changegroupsubset streamreqs=generaldelta,revlogv1 $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=$BUNDLE2_COMPRESSIONS$
191 s> lookup branchmap pushkey known getbundle unbundlehash batch changegroupsubset streamreqs=generaldelta,revlogv1 $USUAL_BUNDLE2_CAPS_SERVER$ unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 httpmediatype=0.1rx,0.1tx,0.2tx compression=$BUNDLE2_COMPRESSIONS$
192 sending listkeys command
192 sending listkeys command
193 s> GET /?cmd=listkeys HTTP/1.1\r\n
193 s> GET /?cmd=listkeys HTTP/1.1\r\n
194 s> Accept-Encoding: identity\r\n
194 s> Accept-Encoding: identity\r\n
195 s> vary: X-HgArg-1,X-HgProto-1\r\n
195 s> vary: X-HgArg-1,X-HgProto-1\r\n
196 s> x-hgarg-1: namespace=namespaces\r\n
196 s> x-hgarg-1: namespace=namespaces\r\n
197 s> x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
197 s> x-hgproto-1: 0.1 0.2 comp=$USUAL_COMPRESSIONS$\r\n
198 s> accept: application/mercurial-0.1\r\n
198 s> accept: application/mercurial-0.1\r\n
199 s> host: $LOCALIP:$HGPORT\r\n (glob)
199 s> host: $LOCALIP:$HGPORT\r\n (glob)
200 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
200 s> user-agent: mercurial/proto-1.0 (Mercurial *)\r\n (glob)
201 s> \r\n
201 s> \r\n
202 s> makefile('rb', None)
202 s> makefile('rb', None)
203 s> HTTP/1.1 200 Script output follows\r\n
203 s> HTTP/1.1 200 Script output follows\r\n
204 s> Server: testing stub value\r\n
204 s> Server: testing stub value\r\n
205 s> Date: $HTTP_DATE$\r\n
205 s> Date: $HTTP_DATE$\r\n
206 s> Content-Type: application/mercurial-0.1\r\n
206 s> Content-Type: application/mercurial-0.1\r\n
207 s> Content-Length: 30\r\n
207 s> Content-Length: 30\r\n
208 s> \r\n
208 s> \r\n
209 s> bookmarks \n
209 s> bookmarks \n
210 s> namespaces \n
210 s> namespaces \n
211 s> phases
211 s> phases
212 response: bookmarks \nnamespaces \nphases
212 response: bookmarks \nnamespaces \nphases
213
213
214 Same thing, but with "httprequest" command
214 Same thing, but with "httprequest" command
215
215
216 $ hg --verbose debugwireproto --peer raw http://$LOCALIP:$HGPORT << EOF
216 $ hg --verbose debugwireproto --peer raw http://$LOCALIP:$HGPORT << EOF
217 > httprequest GET ?cmd=listkeys
217 > httprequest GET ?cmd=listkeys
218 > accept: application/mercurial-0.1
218 > user-agent: test
219 > user-agent: mercurial/proto-1.0 (Mercurial 42)
220 > x-hgarg-1: namespace=namespaces
219 > x-hgarg-1: namespace=namespaces
221 > EOF
220 > EOF
222 using raw connection to peer
221 using raw connection to peer
223 s> GET /?cmd=listkeys HTTP/1.1\r\n
222 s> GET /?cmd=listkeys HTTP/1.1\r\n
224 s> Accept-Encoding: identity\r\n
223 s> Accept-Encoding: identity\r\n
225 s> accept: application/mercurial-0.1\r\n
224 s> user-agent: test\r\n
226 s> user-agent: mercurial/proto-1.0 (Mercurial 42)\r\n
227 s> x-hgarg-1: namespace=namespaces\r\n
225 s> x-hgarg-1: namespace=namespaces\r\n
228 s> host: $LOCALIP:$HGPORT\r\n (glob)
226 s> host: $LOCALIP:$HGPORT\r\n (glob)
229 s> \r\n
227 s> \r\n
230 s> makefile('rb', None)
228 s> makefile('rb', None)
231 s> HTTP/1.1 200 Script output follows\r\n
229 s> HTTP/1.1 200 Script output follows\r\n
232 s> Server: testing stub value\r\n
230 s> Server: testing stub value\r\n
233 s> Date: $HTTP_DATE$\r\n
231 s> Date: $HTTP_DATE$\r\n
234 s> Content-Type: application/mercurial-0.1\r\n
232 s> Content-Type: application/mercurial-0.1\r\n
235 s> Content-Length: 30\r\n
233 s> Content-Length: 30\r\n
236 s> \r\n
234 s> \r\n
237 s> bookmarks \n
235 s> bookmarks \n
238 s> namespaces \n
236 s> namespaces \n
239 s> phases
237 s> phases
240
238
241 $ killdaemons.py
239 $ killdaemons.py
General Comments 0
You need to be logged in to leave comments. Login now