##// END OF EJS Templates
wireproto: syntax for encoding CBOR into frames...
Gregory Szorc -
r37306:cc5a040f default
parent child Browse files
Show More
@@ -1,3076 +1,3079 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import tempfile
24 import tempfile
25 import time
25 import time
26
26
27 from .i18n import _
27 from .i18n import _
28 from .node import (
28 from .node import (
29 bin,
29 bin,
30 hex,
30 hex,
31 nullhex,
31 nullhex,
32 nullid,
32 nullid,
33 nullrev,
33 nullrev,
34 short,
34 short,
35 )
35 )
36 from . import (
36 from . import (
37 bundle2,
37 bundle2,
38 changegroup,
38 changegroup,
39 cmdutil,
39 cmdutil,
40 color,
40 color,
41 context,
41 context,
42 dagparser,
42 dagparser,
43 dagutil,
43 dagutil,
44 encoding,
44 encoding,
45 error,
45 error,
46 exchange,
46 exchange,
47 extensions,
47 extensions,
48 filemerge,
48 filemerge,
49 fileset,
49 fileset,
50 formatter,
50 formatter,
51 hg,
51 hg,
52 httppeer,
52 httppeer,
53 localrepo,
53 localrepo,
54 lock as lockmod,
54 lock as lockmod,
55 logcmdutil,
55 logcmdutil,
56 merge as mergemod,
56 merge as mergemod,
57 obsolete,
57 obsolete,
58 obsutil,
58 obsutil,
59 phases,
59 phases,
60 policy,
60 policy,
61 pvec,
61 pvec,
62 pycompat,
62 pycompat,
63 registrar,
63 registrar,
64 repair,
64 repair,
65 revlog,
65 revlog,
66 revset,
66 revset,
67 revsetlang,
67 revsetlang,
68 scmutil,
68 scmutil,
69 setdiscovery,
69 setdiscovery,
70 simplemerge,
70 simplemerge,
71 smartset,
71 smartset,
72 sshpeer,
72 sshpeer,
73 sslutil,
73 sslutil,
74 streamclone,
74 streamclone,
75 templater,
75 templater,
76 treediscovery,
76 treediscovery,
77 upgrade,
77 upgrade,
78 url as urlmod,
78 url as urlmod,
79 util,
79 util,
80 vfs as vfsmod,
80 vfs as vfsmod,
81 wireprotoframing,
81 wireprotoframing,
82 wireprotoserver,
82 wireprotoserver,
83 )
83 )
84 from .utils import (
84 from .utils import (
85 dateutil,
85 dateutil,
86 procutil,
86 procutil,
87 stringutil,
87 stringutil,
88 )
88 )
89
89
# Convenience alias so call sites can write release(tr, lock, wlock)
# instead of lockmod.release(...).
release = lockmod.release

# Per-module command table builder; the @command decorator below registers
# each debug* command into it.
command = registrar.command()
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # An explicit revlog index file was given on the command line;
        # no repository is required in this mode.
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             index)
        lookupfn = rlog.lookup
    elif len(args) == 2:
        # Fall back to the changelog of the current repository.
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookupfn = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookupfn(rev1), lookupfn(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # Use a context manager so the bundle file handle is closed even when
    # readbundle()/apply() raises; the previous code leaked the handle.
    # debugbundle below already uses this pattern with hg.openpath.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

     - "+n" is a linear run of n nodes based on the current default parent
     - "." is a single node based on the current default parent
     - "$" resets the default parent to null (implied at the start);
       otherwise the default parent is always the last node created
     - "<p" sets the default parent to the backref p
     - "*p" is a fork at parent p, which is a backref
     - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
     - "/p2" is a merge of the preceding node and p2
     - ":tag" defines a local tag for the preceding node
     - "@branch" sets the named branch for subsequent nodes
     - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

     - a number n, which references the node curr-n, where curr is the current
       node, or
     - the name of a local tag you placed earlier using ":tag", or
     - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # This command only makes sense on a freshly created repository.
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass; only used for the
    # progress total and to pre-size the mergeable-file content)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i for i in xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []

    wlock = lock = tr = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction("builddag")

        at = -1                 # rev id of the most recently created node
        atbranch = 'default'    # branch applied to subsequent commits
        nodeids = []            # rev id -> node hash, for backref resolution
        id = 0
        ui.progress(_('building'), id, unit=_('revisions'), total=total)
        # Second parse pass: actually create the commits.
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # Merge node: three-way merge the shared file from
                        # both parents against their common ancestor.
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        # First node: start from the pre-sized content.
                        ml = initialmergedlines
                    # Touch this rev's dedicated line so each rev changes
                    # the file in a mergeable way.
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    # Single file fully rewritten by every revision.
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    # A brand-new file per revision; on merges, also carry
                    # over the other parent's "nf*" files.
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: materialize file content captured
                    # above; None means "file absent in this commit".
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # Resolve parent backrefs to node ids (None == null parent).
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                # ":tag" element: record a local tag for rev id.
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                # "@branch" element: switch branch for subsequent nodes.
                ui.note(('branch %s\n' % data))
                atbranch = data
            ui.progress(_('building'), id, unit=_('revisions'), total=total)
        tr.close()

        if tags:
            repo.vfs.write("localtags", "".join(tags))
    finally:
        ui.progress(_('building'), None)
        # release() handles None entries, so partial acquisition is safe.
        release(tr, lock, wlock)
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Print the contents of changegroup 'gen'.

    With 'all', every delta of every revlog section (changelog, manifest,
    filelogs) is dumped with its full metadata; otherwise only changelog
    node hashes are listed. 'indent' prefixes each output line (used when
    nested inside bundle2 part output).
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # Consume and print the delta chunks of the current section.
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        # The headers must be read in stream order to advance 'gen'.
        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # Filelog sections repeat until an empty header dict terminates.
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    prefix = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        # Unknown on-disk format: report the version byte and give up.
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (prefix, exc.version, len(data)))
    else:
        ui.write("%sversion: %d (%d bytes)\n"
                 % (prefix, version, len(data)))
        fm = ui.formatter('debugobsolete', opts)
        for rawmarker in sorted(markers):
            m = obsutil.marker(None, rawmarker)
            fm.startitem()
            fm.plain(prefix)
            cmdutil.showmarker(fm, m)
        fm.end()
def _debugphaseheads(ui, data, indent=0):
    """display phase heads decoded from binary 'data', one head per line"""
    prefix = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        phasename = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(prefix)
            ui.write('%s %s\n' % (hex(head), phasename))
def _quasirepr(thing):
    """Return a stable, repr-like byte string for *thing*.

    Mapping types are rendered with their keys sorted so the output does
    not depend on iteration order; everything else falls back to repr().
    """
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    pairs = (b'%s: %s' % (key, thing[key]) for key in sorted(thing))
    return '{%s}' % b', '.join(pairs)
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        # Optional filter: only show the requested part types.
        if parttypes and part.type not in parttypes:
            continue
        ui.write('%s -- %s\n' % (part.type, _quasirepr(part.params)))
        if ui.quiet:
            continue
        # Part types are mutually exclusive, so dispatch with elif.
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        elif part.type == 'obsmarkers':
            _debugobsmarkers(ui, part, indent=4, **opts)
        elif part.type == 'phase-heads':
            _debugphaseheads(ui, part, indent=4)
@command('debugbundle',
    [('a', 'all', None, _('show all details')),
    ('', 'part-type', [], _('show only the named part type')),
    ('', 'spec', None, _('print the bundlespec of the bundle'))],
    _('FILE'),
    norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # --spec: only print the bundle specification string.
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
@command('debugcapabilities',
    [], _('PATH'),
    norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    caps = peer.capabilities()
    ui.write(('Main capabilities:\n'))
    for c in sorted(caps):
        ui.write(('  %s\n') % c)
    b2caps = bundle2.bundle2caps(peer)
    if not b2caps:
        return
    ui.write(('Bundle2 capabilities:\n'))
    for key, values in sorted(b2caps.iteritems()):
        ui.write(('  %s\n') % key)
        for v in values:
            ui.write(('    %s\n') % v)
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Cross-check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # And the reverse direction: manifest entries missing from dirstate.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            # NOTE(review): this message lacks a trailing \n, unlike the
            # warnings above — left unchanged to preserve output.
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # Use a name that does not shadow the 'error' module; the previous
        # code bound the message to a local called 'error', which made the
        # raise below fail with AttributeError instead of aborting cleanly.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
@command('debugcolor',
    [('', 'style', None, _('show all configured styles'))],
    'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % ui._colormode)
    if opts.get(r'style'):
        return _debugdisplaystyle(ui)
    return _debugdisplaycolor(ui)
def _debugdisplaycolor(ui):
    """Print every color label the current mode can render, colorized."""
    # Work on a copy so the caller's ui styles are untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        for name, value in ui.configitems('color'):
            if name.startswith('color.'):
                ui._styles[name] = name[6:]
            elif name.startswith('terminfo.'):
                ui._styles[name] = name[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(ui._styles.items(),
                     key=lambda e: ('_' in e[0], e[0], e[1]))
    for colorname, label in entries:
        ui.write(('%s\n') % colorname, label=label)
def _debugdisplaystyle(ui):
    """Print every configured style label with its effects, colorized."""
    ui.write(_('available style:\n'))
    # Pad so the effect lists line up in a column.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            rendered = (ui.label(e, e) for e in effects.split())
            ui.write(', '.join(rendered))
        ui.write('\n')
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # Stream bundles carry raw revlogs, so secret changesets cannot be
    # filtered out; warn rather than abort for now.
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    reqs, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(reqs)))
@command('debugdag',
    [('t', 'tags', None, _('use tags as labels')),
    ('b', 'branches', None, _('annotate with branch names')),
    ('', 'dots', None, _('use dots for runs')),
    ('s', 'spaces', None, _('separate elements by spaces'))],
    _('[OPTION]... [FILE [REV]...]'),
    optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # Explicit revlog index file: emit that revlog's DAG, labeling any
        # revs listed on the command line as "rN".
        rlog = revlog.revlog(vfsmod.vfs(pycompat.getcwd(), audit=False),
                             file_)
        revs = set((int(r) for r in revs))
        def events():
            # Yield ('n', (rev, parents)) nodes and optional ('l', ...)
            # label events in revision order for dagtextlines().
            for r in rlog:
                yield 'n', (r, list(p for p in rlog.parentrevs(r)
                                    if p != -1))
                if r in revs:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No file: walk the repository changelog instead, optionally
        # labeling revs with their tags and annotating branch switches.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Precompute rev -> [tag names] for label events below.
            labels = {}
            for l, n in repo.tags().items():
                labels.setdefault(cl.rev(n), []).append(l)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    # Emit an ('a', branch) annotation whenever the
                    # branch changes between consecutive revisions.
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        yield 'a', newb
                        b = newb
                yield 'n', (r, list(p for p in cl.parentrevs(r)
                                    if p != -1))
                if tags:
                    ls = labels.get(r)
                    if ls:
                        for l in ls:
                            yield 'l', (r, l)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
    ui.write("\n")
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # With -c/-m/--dir the first positional argument is the revision,
        # not a file, so shuffle the arguments accordingly.
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    r = cmdutil.openrevlog(repo, 'debugdata', file_, opts)
    try:
        ui.write(r.revision(r.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
566 raise error.Abort(_('invalid revision identifier %s') % rev)
567
567
@command('debugdate',
         [('e', 'extended', None, _('try extended date formats'))],
         _('[-e] DATE [RANGE]'),
         norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    if opts[r"extended"]:
        d = dateutil.parsedate(date, util.extendeddateformats)
    else:
        d = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % d)
    ui.write(("standard: %s\n") % dateutil.datestr(d))
    if range:
        m = dateutil.matchdate(range)
        ui.write(("match: %s\n") % m(d[0]))
583
583
@command('debugdeltachain',
         cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'),
         optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``:       revision number
    :``chainid``:   delta chain identifier (numbered by unique base)
    :``chainlen``:  delta chain length to this revision
    :``prevrev``:   previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``:  compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                     (new delta chains typically start at ratio 2.00)
    :``lindist``:   linear distance from base revision in delta chain to end
                    of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                     how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``:     total size of data read from the disk for a revision
                       (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``:  density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    index = r.index
    generaldelta = r.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(r, '_withsparseread', False)

    def revinfo(rev):
        # Classify how this revision's delta was computed and accumulate
        # the compressed size of its whole delta chain.
        e = index[rev]
        compsize = e[1]
        uncompsize = e[2]
        chainsize = 0

        if generaldelta:
            # e[3] is the delta base; compare against parents/self to name it.
            if e[3] == e[5]:
                deltatype = 'p1'
            elif e[3] == e[6]:
                deltatype = 'p2'
            elif e[3] == rev - 1:
                deltatype = 'prev'
            elif e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            if e[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'prev'

        chain = r._deltachain(rev)[0]
        for iterrev in chain:
            e = index[iterrev]
            chainsize += e[1]

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    # NOTE(review): the exact column padding below was collapsed by the
    # extraction that produced this file; widths reconstructed to match the
    # %-format strings used in fm.write() further down.
    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    chainbases = {}
    for rev in r:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        start = r.start
        length = r.length
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        try:
            prevrev = chain[-2]
        except IndexError:
            prevrev = -1

        chainratio = float(chainsize) / float(uncomp)
        extraratio = float(extradist) / float(chainsize)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in revlog._slicechunk(r, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                if largestblock < blksize:
                    largestblock = blksize

            readdensity = float(chainsize) / float(readsize)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
725
725
@command('debugdirstate|debugstate',
         [('', 'nodates', None, _('do not display the saved mtime')),
          ('', 'datesort', None, _('sort by saved mtime'))],
         _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get(r'nodates')
    datesort = opts.get(r'datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        # NOTE(review): the padding of 'unset'/'set' was collapsed by the
        # extraction; padded here to the width of the strftime output so
        # the columns line up.
        if ent[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
            timestr = encoding.strtolocal(timestr)
        if ent[1] & 0o20000:
            # symlink bit set in the recorded mode
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
757
757
@command('debugdiscovery',
         [('', 'old', None, _('use old-style discovery')),
          ('', 'nonheads', None,
           _('use old-style discovery with non-heads included')),
          ('', 'rev', [], 'restrict discovery to this set of revs'),
         ] + cmdutil.remoteopts,
         _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(12323)

    def doit(pushedrevs, remoteheads, remote=remote):
        if opts.get('old'):
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))
                dag = dagutil.revlogdag(repo.changelog)
                all = dag.ancestorset(dag.internalizeall(common))
                common = dag.externalizeall(dag.headsetofconnecteds(all))
        else:
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, any, hds = setdiscovery.findcommonheads(ui, repo, remote,
                                                            ancestorsof=nodes)
        common = set(common)
        rheads = set(hds)
        lheads = set(repo.heads())
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
        if lheads <= common:
            ui.write(("local is subset\n"))
        elif rheads <= common:
            ui.write(("remote is subset\n"))

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    doit(localrevs, remoterevs)
809
809
# Read/write buffer size used by debugdownload (4 KiB).
_chunksize = 4 << 10

@command('debugdownload',
         [
             ('o', 'output', '', _('path')),
         ],
         optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        # Stream in fixed-size chunks so large downloads don't buffer fully
        # in memory.
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        if output:
            dest.close()
833
833
@command('debugextensions', cmdutil.formatteropts, [], norepo=True)
def debugextensions(ui, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    exts = extensions.extensions(ui)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)
    for extname, extmod in sorted(exts, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        if isinternal:
            exttestedwith = []  # never expose magic string to users
        else:
            exttestedwith = getattr(extmod, 'testedwith', '').split()
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            # Annotate the name with testing status against this hg version.
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif not exttestedwith:
                fm.plain(_(' (untested!)\n'))
            else:
                lasttestedversion = exttestedwith[-1]
                fm.plain(' (%s!)\n' % lasttestedversion)

        fm.condwrite(ui.verbose and extsource, 'source',
                     _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
879
879
@command('debugfileset',
         [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))],
         _('[-r REV] FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    ctx = scmutil.revsingle(repo, opts.get(r'rev'), None)
    if ui.verbose:
        # Show the parsed fileset tree before evaluating it.
        tree = fileset.parse(expr)
        ui.note(fileset.prettyformat(tree), "\n")

    for f in ctx.getfileset(expr):
        ui.write("%s\n" % f)
892
892
@command('debugformat',
         [] + cmdutil.formatteropts,
         _(''))
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    # Column width: widest variant name, but at least the header label.
    maxvariantlength = max(len(fv.name) for fv in upgrade.allformatvariant)
    maxvariantlength = max(len('format-variant'), maxvariantlength)

    def makeformatname(name):
        return '%s:' + (' ' * (maxvariantlength - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # Strings pass through; booleans render as yes/no.
            if util.safehasattr(value, 'startswith'):
                return value
            if value:
                return 'yes'
            else:
                return 'no'
    else:
        formatvalue = pycompat.identity

    fm.plain('format-variant')
    fm.plain(' ' * (maxvariantlength - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # Pick labels so mismatches between repo, config and default can be
        # color-highlighted by the ui.
        if repovalue != configvalue:
            namelabel = 'formatvariant.name.mismatchconfig'
            repolabel = 'formatvariant.repo.mismatchconfig'
        elif repovalue != fv.default:
            namelabel = 'formatvariant.name.mismatchdefault'
            repolabel = 'formatvariant.repo.mismatchdefault'
        else:
            namelabel = 'formatvariant.name.uptodate'
            repolabel = 'formatvariant.repo.uptodate'

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
955
955
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no'))
    ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no'))
    casesensitive = '(unknown)'
    try:
        # Probe case sensitivity with a throwaway file; stays '(unknown)'
        # if the path isn't writable.
        with tempfile.NamedTemporaryFile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = util.fscasesensitive(f.name) and 'yes' or 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
972
972
@command('debuggetbundle',
         [('H', 'head', [], _('id of head node'), _('ID')),
          ('C', 'common', [], _('id of common node'), _('ID')),
          ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
         _('REPO FILE [-H|-C ID]...'),
         norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = repo.getbundle('debug', **args)

    bundletype = opts.get('type', 'bzip2').lower()
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(bundletype)
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1007
1007
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # Show all the patterns
        ui.write("%s\n" % pycompat.byterepr(ignore))
    else:
        m = scmutil.match(repo[None], pats=files)
        for f in m.files():
            nf = util.normpath(f)
            ignored = None
            ignoredata = None
            if nf != '.':
                if ignore(nf):
                    ignored = nf
                    ignoredata = repo.dirstate._ignorefileandline(nf)
                else:
                    # not directly ignored; check whether a parent
                    # directory is ignored instead
                    for p in util.finddirs(nf):
                        if ignore(p):
                            ignored = p
                            ignoredata = repo.dirstate._ignorefileandline(p)
                            break
            if ignored:
                if ignored == nf:
                    ui.write(_("%s is ignored\n") % m.uipath(f))
                else:
                    ui.write(_("%s is ignored because of "
                               "containing folder %s\n")
                             % (m.uipath(f), ignored))
                ignorefile, lineno, line = ignoredata
                ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                         % (ignorefile, lineno, line))
            else:
                ui.write(_("%s is not ignored\n") % m.uipath(f))
1049
1049
@command('debugindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugindex(ui, repo, file_=None, **opts):
    """dump the contents of an index file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full hashes in debug mode, abbreviated otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    if format == 0:
        if ui.verbose:
            ui.write(("   rev    offset  length linkrev"
                     " %s %s p2\n") % ("nodeid".ljust(idlen),
                                       "p1".ljust(idlen)))
        else:
            ui.write(("   rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write(("   rev flag   offset   length     size   link     p1"
                      "     p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write(("   rev flag     size   link     p1     p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # be tolerant of damaged revlogs when dumping them
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
1114
1114
@command('debugindexdot', cmdutil.debugrevlogopts,
    _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for i in r:
        node = r.node(i)
        pp = r.parents(node)
        # one edge per parent; the null second parent is omitted
        ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
        if pp[1] != nullid:
            ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
    ui.write("}\n")
1129
1129
@command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
def debuginstall(ui, **opts):
    '''test Mercurial installation

    Returns 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)

    def writetemp(contents):
        # write contents to a fresh temp file and return its path
        (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
        f = os.fdopen(fd, r"wb")
        f.write(contents)
        f.close()
        return name

    problems = 0

    fm = ui.formatter('debuginstall', opts)
    fm.startitem()

    # encoding
    fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
    err = None
    try:
        codecs.lookup(pycompat.sysstr(encoding.encoding))
    except LookupError as inst:
        err = stringutil.forcebytestr(inst)
        problems += 1
    fm.condwrite(err, 'encodingerror', _(" %s\n"
                 " (check that your locale is properly set)\n"), err)

    # Python
    fm.write('pythonexe', _("checking Python executable (%s)\n"),
             pycompat.sysexecutable)
    fm.write('pythonver', _("checking Python version (%s)\n"),
             ("%d.%d.%d" % sys.version_info[:3]))
    fm.write('pythonlib', _("checking Python lib (%s)...\n"),
             os.path.dirname(pycompat.fsencode(os.__file__)))

    security = set(sslutil.supportedprotocols)
    if sslutil.hassni:
        security.add('sni')

    fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
             fm.formatlist(sorted(security), name='protocol',
                           fmt='%s', sep=','))

    # These are warnings, not errors. So don't increment problem count. This
    # may change in the future.
    if 'tls1.2' not in security:
        fm.plain(_('  TLS 1.2 not supported by Python install; '
                   'network connections lack modern security\n'))
    if 'sni' not in security:
        fm.plain(_('  SNI not supported by Python install; may have '
                   'connectivity issues with some servers\n'))

    # TODO print CA cert info

    # hg version
    hgver = util.version()
    fm.write('hgver', _("checking Mercurial version (%s)\n"),
             hgver.split('+')[0])
    fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
             '+'.join(hgver.split('+')[1:]))

    # compiled modules
    fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
             policy.policy)
    fm.write('hgmodules', _("checking installed modules (%s)...\n"),
             os.path.dirname(pycompat.fsencode(__file__)))

    if policy.policy in ('c', 'allow'):
        err = None
        try:
            from .cext import (
                base85,
                bdiff,
                mpatch,
                osutil,
            )
            dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
        except Exception as inst:
            err = stringutil.forcebytestr(inst)
            problems += 1
        fm.condwrite(err, 'extensionserror', " %s\n", err)

    compengines = util.compengines._engines.values()
    fm.write('compengines', _('checking registered compression engines (%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines),
                           name='compengine', fmt='%s', sep=', '))
    fm.write('compenginesavail', _('checking available compression engines '
                                   '(%s)\n'),
             fm.formatlist(sorted(e.name() for e in compengines
                                  if e.available()),
                           name='compengine', fmt='%s', sep=', '))
    wirecompengines = util.compengines.supportedwireengines(util.SERVERROLE)
    fm.write('compenginesserver', _('checking available compression engines '
                                    'for wire protocol (%s)\n'),
             fm.formatlist([e.name() for e in wirecompengines
                            if e.wireprotosupport()],
                           name='compengine', fmt='%s', sep=', '))
    re2 = 'missing'
    if util._re2:
        re2 = 'available'
    fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
    fm.data(re2=bool(util._re2))

    # templates
    p = templater.templatepaths()
    fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
    fm.condwrite(not p, '', _(" no template directories found\n"))
    if p:
        m = templater.templatepath("map-cmdline.default")
        if m:
            # template found, check if it is working
            err = None
            try:
                templater.templater.frommapfile(m)
            except Exception as inst:
                err = stringutil.forcebytestr(inst)
                p = None
            fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
        else:
            p = None
        fm.condwrite(p, 'defaulttemplate',
                     _("checking default template (%s)\n"), m)
        fm.condwrite(not m, 'defaulttemplatenotfound',
                     _(" template '%s' not found\n"), "default")
    if not p:
        problems += 1
    fm.condwrite(not p, '',
                 _(" (templates seem to have been installed incorrectly)\n"))

    # editor
    editor = ui.geteditor()
    editor = util.expandpath(editor)
    editorbin = procutil.shellsplit(editor)[0]
    fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
    cmdpath = procutil.findexe(editorbin)
    fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
                 _(" No commit editor set and can't find %s in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editor == 'vi' and editorbin)
    fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
                 _(" Can't find editor '%s' in PATH\n"
                   " (specify a commit editor in your configuration"
                   " file)\n"), not cmdpath and editorbin)
    if not cmdpath and editor != 'vi':
        problems += 1

    # check username
    username = None
    err = None
    try:
        username = ui.username()
    except error.Abort as e:
        err = stringutil.forcebytestr(e)
        problems += 1

    fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
    fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
        " (specify a username in your configuration file)\n"), err)

    fm.condwrite(not problems, '',
                 _("no problems detected\n"))
    if not problems:
        fm.data(problems=problems)
    fm.condwrite(problems, 'problems',
                 _("%d problems detected,"
                   " please check your install!\n"), problems)
    fm.end()

    return problems
1303
1303
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    if not repo.capable('known'):
        raise error.Abort("known() not supported by target repository")
    flags = repo.known([bin(s) for s in ids])
    ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
1317
1317
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # kept only as an alias; all work happens in debugnamecomplete
    debugnamecomplete(ui, repo, *args)
1322
1322
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user, pid)
                    else:
                        locker = 'user %s, process %s, host %s' \
                                 % (user, pid, host)
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise

            ui.write(("%-6s free\n") % (name + ":"))
            return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    return held
1419
1419
1420 @command('debugmergestate', [], '')
1420 @command('debugmergestate', [], '')
1421 def debugmergestate(ui, repo, *args):
1421 def debugmergestate(ui, repo, *args):
1422 """print merge state
1422 """print merge state
1423
1423
1424 Use --verbose to print out information about whether v1 or v2 merge state
1424 Use --verbose to print out information about whether v1 or v2 merge state
1425 was chosen."""
1425 was chosen."""
1426 def _hashornull(h):
1426 def _hashornull(h):
1427 if h == nullhex:
1427 if h == nullhex:
1428 return 'null'
1428 return 'null'
1429 else:
1429 else:
1430 return h
1430 return h
1431
1431
1432 def printrecords(version):
1432 def printrecords(version):
1433 ui.write(('* version %d records\n') % version)
1433 ui.write(('* version %d records\n') % version)
1434 if version == 1:
1434 if version == 1:
1435 records = v1records
1435 records = v1records
1436 else:
1436 else:
1437 records = v2records
1437 records = v2records
1438
1438
1439 for rtype, record in records:
1439 for rtype, record in records:
1440 # pretty print some record types
1440 # pretty print some record types
1441 if rtype == 'L':
1441 if rtype == 'L':
1442 ui.write(('local: %s\n') % record)
1442 ui.write(('local: %s\n') % record)
1443 elif rtype == 'O':
1443 elif rtype == 'O':
1444 ui.write(('other: %s\n') % record)
1444 ui.write(('other: %s\n') % record)
1445 elif rtype == 'm':
1445 elif rtype == 'm':
1446 driver, mdstate = record.split('\0', 1)
1446 driver, mdstate = record.split('\0', 1)
1447 ui.write(('merge driver: %s (state "%s")\n')
1447 ui.write(('merge driver: %s (state "%s")\n')
1448 % (driver, mdstate))
1448 % (driver, mdstate))
1449 elif rtype in 'FDC':
1449 elif rtype in 'FDC':
1450 r = record.split('\0')
1450 r = record.split('\0')
1451 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1451 f, state, hash, lfile, afile, anode, ofile = r[0:7]
1452 if version == 1:
1452 if version == 1:
1453 onode = 'not stored in v1 format'
1453 onode = 'not stored in v1 format'
1454 flags = r[7]
1454 flags = r[7]
1455 else:
1455 else:
1456 onode, flags = r[7:9]
1456 onode, flags = r[7:9]
1457 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1457 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
1458 % (f, rtype, state, _hashornull(hash)))
1458 % (f, rtype, state, _hashornull(hash)))
1459 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1459 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
1460 ui.write((' ancestor path: %s (node %s)\n')
1460 ui.write((' ancestor path: %s (node %s)\n')
1461 % (afile, _hashornull(anode)))
1461 % (afile, _hashornull(anode)))
1462 ui.write((' other path: %s (node %s)\n')
1462 ui.write((' other path: %s (node %s)\n')
1463 % (ofile, _hashornull(onode)))
1463 % (ofile, _hashornull(onode)))
1464 elif rtype == 'f':
1464 elif rtype == 'f':
1465 filename, rawextras = record.split('\0', 1)
1465 filename, rawextras = record.split('\0', 1)
1466 extras = rawextras.split('\0')
1466 extras = rawextras.split('\0')
1467 i = 0
1467 i = 0
1468 extrastrings = []
1468 extrastrings = []
1469 while i < len(extras):
1469 while i < len(extras):
1470 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1470 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
1471 i += 2
1471 i += 2
1472
1472
1473 ui.write(('file extras: %s (%s)\n')
1473 ui.write(('file extras: %s (%s)\n')
1474 % (filename, ', '.join(extrastrings)))
1474 % (filename, ', '.join(extrastrings)))
1475 elif rtype == 'l':
1475 elif rtype == 'l':
1476 labels = record.split('\0', 2)
1476 labels = record.split('\0', 2)
1477 labels = [l for l in labels if len(l) > 0]
1477 labels = [l for l in labels if len(l) > 0]
1478 ui.write(('labels:\n'))
1478 ui.write(('labels:\n'))
1479 ui.write((' local: %s\n' % labels[0]))
1479 ui.write((' local: %s\n' % labels[0]))
1480 ui.write((' other: %s\n' % labels[1]))
1480 ui.write((' other: %s\n' % labels[1]))
1481 if len(labels) > 2:
1481 if len(labels) > 2:
1482 ui.write((' base: %s\n' % labels[2]))
1482 ui.write((' base: %s\n' % labels[2]))
1483 else:
1483 else:
1484 ui.write(('unrecognized entry: %s\t%s\n')
1484 ui.write(('unrecognized entry: %s\t%s\n')
1485 % (rtype, record.replace('\0', '\t')))
1485 % (rtype, record.replace('\0', '\t')))
1486
1486
1487 # Avoid mergestate.read() since it may raise an exception for unsupported
1487 # Avoid mergestate.read() since it may raise an exception for unsupported
1488 # merge state records. We shouldn't be doing this, but this is OK since this
1488 # merge state records. We shouldn't be doing this, but this is OK since this
1489 # command is pretty low-level.
1489 # command is pretty low-level.
1490 ms = mergemod.mergestate(repo)
1490 ms = mergemod.mergestate(repo)
1491
1491
1492 # sort so that reasonable information is on top
1492 # sort so that reasonable information is on top
1493 v1records = ms._readrecordsv1()
1493 v1records = ms._readrecordsv1()
1494 v2records = ms._readrecordsv2()
1494 v2records = ms._readrecordsv2()
1495 order = 'LOml'
1495 order = 'LOml'
1496 def key(r):
1496 def key(r):
1497 idx = order.find(r[0])
1497 idx = order.find(r[0])
1498 if idx == -1:
1498 if idx == -1:
1499 return (1, r[1])
1499 return (1, r[1])
1500 else:
1500 else:
1501 return (0, idx)
1501 return (0, idx)
1502 v1records.sort(key=key)
1502 v1records.sort(key=key)
1503 v2records.sort(key=key)
1503 v2records.sort(key=key)
1504
1504
1505 if not v1records and not v2records:
1505 if not v1records and not v2records:
1506 ui.write(('no merge state found\n'))
1506 ui.write(('no merge state found\n'))
1507 elif not v2records:
1507 elif not v2records:
1508 ui.note(('no version 2 merge state\n'))
1508 ui.note(('no version 2 merge state\n'))
1509 printrecords(1)
1509 printrecords(1)
1510 elif ms._v1v2match(v1records, v2records):
1510 elif ms._v1v2match(v1records, v2records):
1511 ui.note(('v1 and v2 states match: using v2\n'))
1511 ui.note(('v1 and v2 states match: using v2\n'))
1512 printrecords(2)
1512 printrecords(2)
1513 else:
1513 else:
1514 ui.note(('v1 and v2 states mismatch: using v1\n'))
1514 ui.note(('v1 and v2 states mismatch: using v1\n'))
1515 printrecords(1)
1515 printrecords(1)
1516 if ui.verbose:
1516 if ui.verbose:
1517 printrecords(2)
1517 printrecords(2)
1518
1518
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    names = set()
    # since we previously only listed open branches, we will handle that
    # specially (after this for loop)
    for name, ns in repo.names.iteritems():
        if name != 'branches':
            names.update(ns.listnames(repo))
    # only include branches that are not closed
    names.update(tag for (tag, heads, tip, closed)
                 in repo.branchmap().iterbranches() if not closed)
    completions = set()
    if not args:
        # no prefix given: every known name matches
        args = ['']
    for a in args:
        completions.update(n for n in names if n.startswith(a))
    ui.write('\n'.join(sorted(completions)))
    ui.write('\n')
1538
1538
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id; abort on anything else.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = opts['user'] or ui.username()
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1655
1655
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) matching 'path' whose dirstate status
        # character is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        if spec != repo.root and not spec.startswith(rootdir):
            # path escapes the repository: nothing to complete
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate on Windows-style OSes
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    # stop at the next path segment unless --full
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1720
1720
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1739
1739
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Suppress _picktool's own chatter unless --debug was given.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
1818
1818
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        key, old, new = keyinfo
        r = target.pushkey(namespace, key, old, new)
        ui.status(pycompat.bytestr(r) + '\n')
        # shell convention: exit 0 on success, non-zero on failure
        return not r
    else:
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
1839
1839
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """display pvec information and the relation between two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): 'rel' stays unbound (NameError below) if none of these
    # comparisons hold; presumably =, >, < and | are exhaustive — confirm
    # against the pvec module.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
1860
1860
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
1898
1898
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Thin wrapper; the real work lives in the repair module.
    repair.rebuildfncache(ui, repo)
1903
1903
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] FILE'))
def debugrename(ui, repo, file1, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, (file1,) + pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # filelog().renamed() returns (source path, source node) or False
        o = fctx.filelog().renamed(fctx.filenode())
        rel = m.rel(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
1921
1921
@command('debugrevlog', cmdutil.debugrevlogopts +
    [('d', 'dump', False, _('dump index data'))],
    _('-c|-m|FILE'),
    optionalrepo=True)
def debugrevlog(ui, repo, file_=None, **opts):
    """show data and statistics about a revlog"""
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)

    # --dump mode: emit one row of raw index data per revision and return.
    if opts.get("dump"):
        numrevs = len(r)
        ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
                  " rawsize totalsize compression heads chainlen\n"))
        ts = 0  # running total of raw (uncompressed) sizes
        heads = set()

        for rev in xrange(numrevs):
            dbase = r.deltaparent(rev)
            if dbase == -1:
                # -1 means "no delta parent": treat the revision itself
                # as its own delta base for the output columns.
                dbase = rev
            cbase = r.chainbase(rev)
            clen = r.chainlen(rev)
            p1, p2 = r.parentrevs(rev)
            rs = r.rawsize(rev)
            ts = ts + rs
            # Maintain the current head set incrementally: parents of this
            # revision are no longer heads, this revision is.
            heads -= set(r.parentrevs(rev))
            heads.add(rev)
            try:
                # Ratio of cumulative raw size to on-disk offset so far.
                compression = ts / r.end(rev)
            except ZeroDivisionError:
                compression = 0
            ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
                     "%11d %5d %8d\n" %
                     (rev, p1, p2, r.start(rev), r.end(rev),
                      r.start(dbase), r.start(cbase),
                      r.start(p1), r.start(p2),
                      rs, ts, compression, len(heads), clen))
        return 0

    # Decode the revlog version/flags word.
    v = r.version
    format = v & 0xFFFF
    flags = []
    gdelta = False
    if v & revlog.FLAG_INLINE_DATA:
        flags.append('inline')
    if v & revlog.FLAG_GENERALDELTA:
        gdelta = True
        flags.append('generaldelta')
    if not flags:
        flags = ['(none)']

    # Counters for the statistics pass below.
    nummerges = 0
    numfull = 0      # full snapshot revisions
    numprev = 0      # deltas against the previous revision
    nump1 = 0        # deltas against p1 (not previous)
    nump2 = 0        # deltas against p2 (not previous)
    numother = 0     # deltas against some other revision
    nump1prev = 0    # deltas where previous revision == p1
    nump2prev = 0    # deltas where previous revision == p2
    chainlengths = []
    chainbases = []
    chainspans = []

    # Each size accumulator is [min, max, total]; min starts as None so the
    # first observed value always wins.
    datasize = [None, 0, 0]
    fullsize = [None, 0, 0]
    deltasize = [None, 0, 0]
    chunktypecounts = {}
    chunktypesizes = {}

    def addsize(size, l):
        # Fold one observation into a [min, max, total] accumulator.
        if l[0] is None or size < l[0]:
            l[0] = size
        if size > l[1]:
            l[1] = size
        l[2] += size

    numrevs = len(r)
    for rev in xrange(numrevs):
        p1, p2 = r.parentrevs(rev)
        delta = r.deltaparent(rev)
        if format > 0:
            addsize(r.rawsize(rev), datasize)
        if p2 != nullrev:
            nummerges += 1
        size = r.length(rev)
        if delta == nullrev:
            # Full snapshot: starts a new delta chain.
            chainlengths.append(0)
            chainbases.append(r.start(rev))
            chainspans.append(size)
            numfull += 1
            addsize(size, fullsize)
        else:
            # Delta revision: extend the chain of its delta parent and
            # classify what the delta is computed against.
            chainlengths.append(chainlengths[delta] + 1)
            baseaddr = chainbases[delta]
            revaddr = r.start(rev)
            chainbases.append(baseaddr)
            chainspans.append((revaddr - baseaddr) + size)
            addsize(size, deltasize)
            if delta == rev - 1:
                numprev += 1
                if delta == p1:
                    nump1prev += 1
                elif delta == p2:
                    nump2prev += 1
            elif delta == p1:
                nump1 += 1
            elif delta == p2:
                nump2 += 1
            elif delta != nullrev:
                numother += 1

        # Obtain data on the raw chunks in the revlog.
        segment = r._getsegmentforrevs(rev, rev)[1]
        if segment:
            # First byte of the stored chunk identifies the compression
            # engine (e.g. 'x' for zlib, 'u' for uncompressed).
            chunktype = bytes(segment[0:1])
        else:
            chunktype = 'empty'

        if chunktype not in chunktypecounts:
            chunktypecounts[chunktype] = 0
            chunktypesizes[chunktype] = 0

        chunktypecounts[chunktype] += 1
        chunktypesizes[chunktype] += size

    # Adjust size min value for empty cases
    for size in (datasize, fullsize, deltasize):
        if size[0] is None:
            size[0] = 0

    numdeltas = numrevs - numfull
    numoprev = numprev - nump1prev - nump2prev
    totalrawsize = datasize[2]
    # Convert totals in slot [2] into averages in place.
    datasize[2] /= numrevs
    fulltotal = fullsize[2]
    fullsize[2] /= numfull
    deltatotal = deltasize[2]
    if numrevs - numfull > 0:
        deltasize[2] /= numrevs - numfull
    totalsize = fulltotal + deltatotal
    avgchainlen = sum(chainlengths) / numrevs
    maxchainlen = max(chainlengths)
    maxchainspan = max(chainspans)
    compratio = 1
    if totalsize:
        compratio = totalrawsize / totalsize

    # Format-string templates: a padded decimal, and a padded decimal with
    # a trailing percentage.
    basedfmtstr = '%%%dd\n'
    basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'

    def dfmtstr(max):
        # Decimal format wide enough for the largest value printed with it.
        return basedfmtstr % len(str(max))
    def pcfmtstr(max, padding=0):
        # Decimal-plus-percentage format, optionally extra-padded.
        return basepcfmtstr % (len(str(max)), ' ' * padding)

    def pcfmt(value, total):
        # Return (value, percentage-of-total); 100% when total is falsy.
        if total:
            return (value, 100 * float(value) / total)
        else:
            return value, 100.0

    ui.write(('format : %d\n') % format)
    ui.write(('flags : %s\n') % ', '.join(flags))

    ui.write('\n')
    fmt = pcfmtstr(totalsize)
    fmt2 = dfmtstr(totalsize)
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
    ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
    ui.write(('revisions : ') + fmt2 % numrevs)
    ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
    ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
    ui.write(('revision size : ') + fmt2 % totalsize)
    ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
    ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))

    def fmtchunktype(chunktype):
        # Human-friendly label for a chunk-type byte.
        if chunktype == 'empty':
            return ' %s : ' % chunktype
        elif chunktype in pycompat.bytestr(string.ascii_letters):
            return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
        else:
            return ' 0x%s : ' % hex(chunktype)

    ui.write('\n')
    ui.write(('chunks : ') + fmt2 % numrevs)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
    ui.write(('chunks size : ') + fmt2 % totalsize)
    for chunktype in sorted(chunktypecounts):
        ui.write(fmtchunktype(chunktype))
        ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))

    ui.write('\n')
    fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
    ui.write(('avg chain length : ') + fmt % avgchainlen)
    ui.write(('max chain length : ') + fmt % maxchainlen)
    ui.write(('max chain reach : ') + fmt % maxchainspan)
    ui.write(('compression ratio : ') + fmt % compratio)

    if format > 0:
        ui.write('\n')
        ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
                 % tuple(datasize))
        ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
                 % tuple(fullsize))
        ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
                 % tuple(deltasize))

    if numdeltas > 0:
        ui.write('\n')
        fmt = pcfmtstr(numdeltas)
        fmt2 = pcfmtstr(numdeltas, 4)
        ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
        if numprev > 0:
            ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
                                                            numprev))
            ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
                                                            numprev))
            ui.write((' other : ') + fmt2 % pcfmt(numoprev,
                                                  numprev))
        if gdelta:
            # p1/p2/other breakdown is only meaningful with generaldelta,
            # where the delta parent may differ from the previous revision.
            ui.write(('deltas against p1 : ')
                     + fmt % pcfmt(nump1, numdeltas))
            ui.write(('deltas against p2 : ')
                     + fmt % pcfmt(nump2, numdeltas))
            ui.write(('deltas against other : ') + fmt % pcfmt(numother,
                                                               numdeltas))
2152
2152
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # Ordered pipeline of (stage name, transform) applied to the parse tree.
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                           ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        # Drop the final 'optimized' stage.
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    # Which stages to print: always, or only when the tree changed.
    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # Run the pipeline, remembering each stage's tree and printing the
    # requested ones (suppressing repeats for 'changed-only' stages).
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=repo.__contains__)
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # Evaluate the analyzed and optimized trees independently and
        # compare the resulting revision sequences.
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), smartset.prettyformat(arevs), "\n")
            ui.write(("* optimized set:\n"), smartset.prettyformat(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # Differ: print a unified-diff-style comparison and return 1.
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    # Normal path: evaluate the final tree and print the result set/revs.
    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), smartset.prettyformat(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2255
2255
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    # Only the SSH-over-stdio mode is implemented so far.
    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # The two logging destinations are mutually exclusive.
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    # Hand stdio over to the SSH wire-protocol server; this blocks until
    # the client disconnects.
    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2286
2286
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    # Resolve both revisions up front; a missing second revision defaults
    # to the null revision.
    p1node = scmutil.revsingle(repo, rev1).node()
    p2node = scmutil.revsingle(repo, rev2, 'null').node()

    # Record the new parents under the working-copy lock.
    with repo.wlock():
        repo.setparents(p1node, p2node)
2304
2304
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        # Fall back to the repository's configured 'default' path.
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)
    addr = None

    # Only schemes with a well-known TLS-capable port are supported.
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # Imported lazily: the win32 module only exists/works on Windows, and
    # the platform check above guarantees we are there.
    from . import win32

    # Verification is deliberately disabled (CERT_NONE): the goal is to
    # fetch whatever chain the server presents, not to validate it here.
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        # DER-encoded peer certificate (binary form requested with True).
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # First pass: check only, without triggering Windows Update.
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # Second pass (build=True by default) may install missing
            # intermediates/roots via Windows Update.
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2366
2366
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    # Dump the subrepository state (path, source, revision) recorded in the
    # given revision, in sorted path order.
    ctx = scmutil.revsingle(repo, rev, None)
    for path, state in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % path)
        ui.write((' source %s\n') % state[0])
        ui.write((' revision %s\n') % state[1])
2377
2377
2378 @command('debugsuccessorssets',
2378 @command('debugsuccessorssets',
2379 [('', 'closest', False, _('return closest successors sets only'))],
2379 [('', 'closest', False, _('return closest successors sets only'))],
2380 _('[REV]'))
2380 _('[REV]'))
2381 def debugsuccessorssets(ui, repo, *revs, **opts):
2381 def debugsuccessorssets(ui, repo, *revs, **opts):
2382 """show set of successors for revision
2382 """show set of successors for revision
2383
2383
2384 A successors set of changeset A is a consistent group of revisions that
2384 A successors set of changeset A is a consistent group of revisions that
2385 succeed A. It contains non-obsolete changesets only unless closests
2385 succeed A. It contains non-obsolete changesets only unless closests
2386 successors set is set.
2386 successors set is set.
2387
2387
2388 In most cases a changeset A has a single successors set containing a single
2388 In most cases a changeset A has a single successors set containing a single
2389 successor (changeset A replaced by A').
2389 successor (changeset A replaced by A').
2390
2390
2391 A changeset that is made obsolete with no successors are called "pruned".
2391 A changeset that is made obsolete with no successors are called "pruned".
2392 Such changesets have no successors sets at all.
2392 Such changesets have no successors sets at all.
2393
2393
2394 A changeset that has been "split" will have a successors set containing
2394 A changeset that has been "split" will have a successors set containing
2395 more than one successor.
2395 more than one successor.
2396
2396
2397 A changeset that has been rewritten in multiple different ways is called
2397 A changeset that has been rewritten in multiple different ways is called
2398 "divergent". Such changesets have multiple successor sets (each of which
2398 "divergent". Such changesets have multiple successor sets (each of which
2399 may also be split, i.e. have multiple successors).
2399 may also be split, i.e. have multiple successors).
2400
2400
2401 Results are displayed as follows::
2401 Results are displayed as follows::
2402
2402
2403 <rev1>
2403 <rev1>
2404 <successors-1A>
2404 <successors-1A>
2405 <rev2>
2405 <rev2>
2406 <successors-2A>
2406 <successors-2A>
2407 <successors-2B1> <successors-2B2> <successors-2B3>
2407 <successors-2B1> <successors-2B2> <successors-2B3>
2408
2408
2409 Here rev2 has two possible (i.e. divergent) successors sets. The first
2409 Here rev2 has two possible (i.e. divergent) successors sets. The first
2410 holds one element, whereas the second holds three (i.e. the changeset has
2410 holds one element, whereas the second holds three (i.e. the changeset has
2411 been split).
2411 been split).
2412 """
2412 """
2413 # passed to successorssets caching computation from one call to another
2413 # passed to successorssets caching computation from one call to another
2414 cache = {}
2414 cache = {}
2415 ctx2str = bytes
2415 ctx2str = bytes
2416 node2str = short
2416 node2str = short
2417 for rev in scmutil.revrange(repo, revs):
2417 for rev in scmutil.revrange(repo, revs):
2418 ctx = repo[rev]
2418 ctx = repo[rev]
2419 ui.write('%s\n'% ctx2str(ctx))
2419 ui.write('%s\n'% ctx2str(ctx))
2420 for succsset in obsutil.successorssets(repo, ctx.node(),
2420 for succsset in obsutil.successorssets(repo, ctx.node(),
2421 closest=opts[r'closest'],
2421 closest=opts[r'closest'],
2422 cache=cache):
2422 cache=cache):
2423 if succsset:
2423 if succsset:
2424 ui.write(' ')
2424 ui.write(' ')
2425 ui.write(node2str(succsset[0]))
2425 ui.write(node2str(succsset[0]))
2426 for node in succsset[1:]:
2426 for node in succsset[1:]:
2427 ui.write(' ')
2427 ui.write(' ')
2428 ui.write(node2str(node))
2428 ui.write(node2str(node))
2429 ui.write('\n')
2429 ui.write('\n')
2430
2430
2431 @command('debugtemplate',
2431 @command('debugtemplate',
2432 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2432 [('r', 'rev', [], _('apply template on changesets'), _('REV')),
2433 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2433 ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
2434 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2434 _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
2435 optionalrepo=True)
2435 optionalrepo=True)
2436 def debugtemplate(ui, repo, tmpl, **opts):
2436 def debugtemplate(ui, repo, tmpl, **opts):
2437 """parse and apply a template
2437 """parse and apply a template
2438
2438
2439 If -r/--rev is given, the template is processed as a log template and
2439 If -r/--rev is given, the template is processed as a log template and
2440 applied to the given changesets. Otherwise, it is processed as a generic
2440 applied to the given changesets. Otherwise, it is processed as a generic
2441 template.
2441 template.
2442
2442
2443 Use --verbose to print the parsed tree.
2443 Use --verbose to print the parsed tree.
2444 """
2444 """
2445 revs = None
2445 revs = None
2446 if opts[r'rev']:
2446 if opts[r'rev']:
2447 if repo is None:
2447 if repo is None:
2448 raise error.RepoError(_('there is no Mercurial repository here '
2448 raise error.RepoError(_('there is no Mercurial repository here '
2449 '(.hg not found)'))
2449 '(.hg not found)'))
2450 revs = scmutil.revrange(repo, opts[r'rev'])
2450 revs = scmutil.revrange(repo, opts[r'rev'])
2451
2451
2452 props = {}
2452 props = {}
2453 for d in opts[r'define']:
2453 for d in opts[r'define']:
2454 try:
2454 try:
2455 k, v = (e.strip() for e in d.split('=', 1))
2455 k, v = (e.strip() for e in d.split('=', 1))
2456 if not k or k == 'ui':
2456 if not k or k == 'ui':
2457 raise ValueError
2457 raise ValueError
2458 props[k] = v
2458 props[k] = v
2459 except ValueError:
2459 except ValueError:
2460 raise error.Abort(_('malformed keyword definition: %s') % d)
2460 raise error.Abort(_('malformed keyword definition: %s') % d)
2461
2461
2462 if ui.verbose:
2462 if ui.verbose:
2463 aliases = ui.configitems('templatealias')
2463 aliases = ui.configitems('templatealias')
2464 tree = templater.parse(tmpl)
2464 tree = templater.parse(tmpl)
2465 ui.note(templater.prettyformat(tree), '\n')
2465 ui.note(templater.prettyformat(tree), '\n')
2466 newtree = templater.expandaliases(tree, aliases)
2466 newtree = templater.expandaliases(tree, aliases)
2467 if newtree != tree:
2467 if newtree != tree:
2468 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2468 ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')
2469
2469
2470 if revs is None:
2470 if revs is None:
2471 tres = formatter.templateresources(ui, repo)
2471 tres = formatter.templateresources(ui, repo)
2472 t = formatter.maketemplater(ui, tmpl, resources=tres)
2472 t = formatter.maketemplater(ui, tmpl, resources=tres)
2473 ui.write(t.renderdefault(props))
2473 ui.write(t.renderdefault(props))
2474 else:
2474 else:
2475 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2475 displayer = logcmdutil.maketemplater(ui, repo, tmpl)
2476 for r in revs:
2476 for r in revs:
2477 displayer.show(repo[r], **pycompat.strkwargs(props))
2477 displayer.show(repo[r], **pycompat.strkwargs(props))
2478 displayer.close()
2478 displayer.close()
2479
2479
2480 @command('debuguigetpass', [
2480 @command('debuguigetpass', [
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2481 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2482 ], _('[-p TEXT]'), norepo=True)
2482 ], _('[-p TEXT]'), norepo=True)
2483 def debuguigetpass(ui, prompt=''):
2483 def debuguigetpass(ui, prompt=''):
2484 """show prompt to type password"""
2484 """show prompt to type password"""
2485 r = ui.getpass(prompt)
2485 r = ui.getpass(prompt)
2486 ui.write(('respose: %s\n') % r)
2486 ui.write(('respose: %s\n') % r)
2487
2487
2488 @command('debuguiprompt', [
2488 @command('debuguiprompt', [
2489 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2489 ('p', 'prompt', '', _('prompt text'), _('TEXT')),
2490 ], _('[-p TEXT]'), norepo=True)
2490 ], _('[-p TEXT]'), norepo=True)
2491 def debuguiprompt(ui, prompt=''):
2491 def debuguiprompt(ui, prompt=''):
2492 """show plain prompt"""
2492 """show plain prompt"""
2493 r = ui.prompt(prompt)
2493 r = ui.prompt(prompt)
2494 ui.write(('response: %s\n') % r)
2494 ui.write(('response: %s\n') % r)
2495
2495
2496 @command('debugupdatecaches', [])
2496 @command('debugupdatecaches', [])
2497 def debugupdatecaches(ui, repo, *pats, **opts):
2497 def debugupdatecaches(ui, repo, *pats, **opts):
2498 """warm all known caches in the repository"""
2498 """warm all known caches in the repository"""
2499 with repo.wlock(), repo.lock():
2499 with repo.wlock(), repo.lock():
2500 repo.updatecaches(full=True)
2500 repo.updatecaches(full=True)
2501
2501
2502 @command('debugupgraderepo', [
2502 @command('debugupgraderepo', [
2503 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2503 ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
2504 ('', 'run', False, _('performs an upgrade')),
2504 ('', 'run', False, _('performs an upgrade')),
2505 ])
2505 ])
2506 def debugupgraderepo(ui, repo, run=False, optimize=None):
2506 def debugupgraderepo(ui, repo, run=False, optimize=None):
2507 """upgrade a repository to use different features
2507 """upgrade a repository to use different features
2508
2508
2509 If no arguments are specified, the repository is evaluated for upgrade
2509 If no arguments are specified, the repository is evaluated for upgrade
2510 and a list of problems and potential optimizations is printed.
2510 and a list of problems and potential optimizations is printed.
2511
2511
2512 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2512 With ``--run``, a repository upgrade is performed. Behavior of the upgrade
2513 can be influenced via additional arguments. More details will be provided
2513 can be influenced via additional arguments. More details will be provided
2514 by the command output when run without ``--run``.
2514 by the command output when run without ``--run``.
2515
2515
2516 During the upgrade, the repository will be locked and no writes will be
2516 During the upgrade, the repository will be locked and no writes will be
2517 allowed.
2517 allowed.
2518
2518
2519 At the end of the upgrade, the repository may not be readable while new
2519 At the end of the upgrade, the repository may not be readable while new
2520 repository data is swapped in. This window will be as long as it takes to
2520 repository data is swapped in. This window will be as long as it takes to
2521 rename some directories inside the ``.hg`` directory. On most machines, this
2521 rename some directories inside the ``.hg`` directory. On most machines, this
2522 should complete almost instantaneously and the chances of a consumer being
2522 should complete almost instantaneously and the chances of a consumer being
2523 unable to access the repository should be low.
2523 unable to access the repository should be low.
2524 """
2524 """
2525 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2525 return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize)
2526
2526
2527 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2527 @command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
2528 inferrepo=True)
2528 inferrepo=True)
2529 def debugwalk(ui, repo, *pats, **opts):
2529 def debugwalk(ui, repo, *pats, **opts):
2530 """show how files match on given patterns"""
2530 """show how files match on given patterns"""
2531 opts = pycompat.byteskwargs(opts)
2531 opts = pycompat.byteskwargs(opts)
2532 m = scmutil.match(repo[None], pats, opts)
2532 m = scmutil.match(repo[None], pats, opts)
2533 ui.write(('matcher: %r\n' % m))
2533 ui.write(('matcher: %r\n' % m))
2534 items = list(repo[None].walk(m))
2534 items = list(repo[None].walk(m))
2535 if not items:
2535 if not items:
2536 return
2536 return
2537 f = lambda fn: fn
2537 f = lambda fn: fn
2538 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2538 if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
2539 f = lambda fn: util.normpath(fn)
2539 f = lambda fn: util.normpath(fn)
2540 fmt = 'f %%-%ds %%-%ds %%s' % (
2540 fmt = 'f %%-%ds %%-%ds %%s' % (
2541 max([len(abs) for abs in items]),
2541 max([len(abs) for abs in items]),
2542 max([len(m.rel(abs)) for abs in items]))
2542 max([len(m.rel(abs)) for abs in items]))
2543 for abs in items:
2543 for abs in items:
2544 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2544 line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
2545 ui.write("%s\n" % line.rstrip())
2545 ui.write("%s\n" % line.rstrip())
2546
2546
2547 @command('debugwhyunstable', [], _('REV'))
2547 @command('debugwhyunstable', [], _('REV'))
2548 def debugwhyunstable(ui, repo, rev):
2548 def debugwhyunstable(ui, repo, rev):
2549 """explain instabilities of a changeset"""
2549 """explain instabilities of a changeset"""
2550 for entry in obsutil.whyunstable(repo, repo[rev]):
2550 for entry in obsutil.whyunstable(repo, repo[rev]):
2551 dnodes = ''
2551 dnodes = ''
2552 if entry.get('divergentnodes'):
2552 if entry.get('divergentnodes'):
2553 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2553 dnodes = ' '.join('%s (%s)' % (ctx.hex(), ctx.phasestr())
2554 for ctx in entry['divergentnodes']) + ' '
2554 for ctx in entry['divergentnodes']) + ' '
2555 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2555 ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
2556 entry['reason'], entry['node']))
2556 entry['reason'], entry['node']))
2557
2557
2558 @command('debugwireargs',
2558 @command('debugwireargs',
2559 [('', 'three', '', 'three'),
2559 [('', 'three', '', 'three'),
2560 ('', 'four', '', 'four'),
2560 ('', 'four', '', 'four'),
2561 ('', 'five', '', 'five'),
2561 ('', 'five', '', 'five'),
2562 ] + cmdutil.remoteopts,
2562 ] + cmdutil.remoteopts,
2563 _('REPO [OPTIONS]... [ONE [TWO]]'),
2563 _('REPO [OPTIONS]... [ONE [TWO]]'),
2564 norepo=True)
2564 norepo=True)
2565 def debugwireargs(ui, repopath, *vals, **opts):
2565 def debugwireargs(ui, repopath, *vals, **opts):
2566 opts = pycompat.byteskwargs(opts)
2566 opts = pycompat.byteskwargs(opts)
2567 repo = hg.peer(ui, opts, repopath)
2567 repo = hg.peer(ui, opts, repopath)
2568 for opt in cmdutil.remoteopts:
2568 for opt in cmdutil.remoteopts:
2569 del opts[opt[1]]
2569 del opts[opt[1]]
2570 args = {}
2570 args = {}
2571 for k, v in opts.iteritems():
2571 for k, v in opts.iteritems():
2572 if v:
2572 if v:
2573 args[k] = v
2573 args[k] = v
2574 args = pycompat.strkwargs(args)
2574 args = pycompat.strkwargs(args)
2575 # run twice to check that we don't mess up the stream for the next command
2575 # run twice to check that we don't mess up the stream for the next command
2576 res1 = repo.debugwireargs(*vals, **args)
2576 res1 = repo.debugwireargs(*vals, **args)
2577 res2 = repo.debugwireargs(*vals, **args)
2577 res2 = repo.debugwireargs(*vals, **args)
2578 ui.write("%s\n" % res1)
2578 ui.write("%s\n" % res1)
2579 if res1 != res2:
2579 if res1 != res2:
2580 ui.warn("%s\n" % res2)
2580 ui.warn("%s\n" % res2)
2581
2581
2582 def _parsewirelangblocks(fh):
2582 def _parsewirelangblocks(fh):
2583 activeaction = None
2583 activeaction = None
2584 blocklines = []
2584 blocklines = []
2585
2585
2586 for line in fh:
2586 for line in fh:
2587 line = line.rstrip()
2587 line = line.rstrip()
2588 if not line:
2588 if not line:
2589 continue
2589 continue
2590
2590
2591 if line.startswith(b'#'):
2591 if line.startswith(b'#'):
2592 continue
2592 continue
2593
2593
2594 if not line.startswith(' '):
2594 if not line.startswith(' '):
2595 # New block. Flush previous one.
2595 # New block. Flush previous one.
2596 if activeaction:
2596 if activeaction:
2597 yield activeaction, blocklines
2597 yield activeaction, blocklines
2598
2598
2599 activeaction = line
2599 activeaction = line
2600 blocklines = []
2600 blocklines = []
2601 continue
2601 continue
2602
2602
2603 # Else we start with an indent.
2603 # Else we start with an indent.
2604
2604
2605 if not activeaction:
2605 if not activeaction:
2606 raise error.Abort(_('indented line outside of block'))
2606 raise error.Abort(_('indented line outside of block'))
2607
2607
2608 blocklines.append(line)
2608 blocklines.append(line)
2609
2609
2610 # Flush last block.
2610 # Flush last block.
2611 if activeaction:
2611 if activeaction:
2612 yield activeaction, blocklines
2612 yield activeaction, blocklines
2613
2613
2614 @command('debugwireproto',
2614 @command('debugwireproto',
2615 [
2615 [
2616 ('', 'localssh', False, _('start an SSH server for this repo')),
2616 ('', 'localssh', False, _('start an SSH server for this repo')),
2617 ('', 'peer', '', _('construct a specific version of the peer')),
2617 ('', 'peer', '', _('construct a specific version of the peer')),
2618 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2618 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2619 ] + cmdutil.remoteopts,
2619 ] + cmdutil.remoteopts,
2620 _('[PATH]'),
2620 _('[PATH]'),
2621 optionalrepo=True)
2621 optionalrepo=True)
2622 def debugwireproto(ui, repo, path=None, **opts):
2622 def debugwireproto(ui, repo, path=None, **opts):
2623 """send wire protocol commands to a server
2623 """send wire protocol commands to a server
2624
2624
2625 This command can be used to issue wire protocol commands to remote
2625 This command can be used to issue wire protocol commands to remote
2626 peers and to debug the raw data being exchanged.
2626 peers and to debug the raw data being exchanged.
2627
2627
2628 ``--localssh`` will start an SSH server against the current repository
2628 ``--localssh`` will start an SSH server against the current repository
2629 and connect to that. By default, the connection will perform a handshake
2629 and connect to that. By default, the connection will perform a handshake
2630 and establish an appropriate peer instance.
2630 and establish an appropriate peer instance.
2631
2631
2632 ``--peer`` can be used to bypass the handshake protocol and construct a
2632 ``--peer`` can be used to bypass the handshake protocol and construct a
2633 peer instance using the specified class type. Valid values are ``raw``,
2633 peer instance using the specified class type. Valid values are ``raw``,
2634 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2634 ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending raw data
2635 payloads and don't support higher-level command actions.
2635 payloads and don't support higher-level command actions.
2636
2636
2637 ``--noreadstderr`` can be used to disable automatic reading from stderr
2637 ``--noreadstderr`` can be used to disable automatic reading from stderr
2638 of the peer (for SSH connections only). Disabling automatic reading of
2638 of the peer (for SSH connections only). Disabling automatic reading of
2639 stderr is useful for making output more deterministic.
2639 stderr is useful for making output more deterministic.
2640
2640
2641 Commands are issued via a mini language which is specified via stdin.
2641 Commands are issued via a mini language which is specified via stdin.
2642 The language consists of individual actions to perform. An action is
2642 The language consists of individual actions to perform. An action is
2643 defined by a block. A block is defined as a line with no leading
2643 defined by a block. A block is defined as a line with no leading
2644 space followed by 0 or more lines with leading space. Blocks are
2644 space followed by 0 or more lines with leading space. Blocks are
2645 effectively a high-level command with additional metadata.
2645 effectively a high-level command with additional metadata.
2646
2646
2647 Lines beginning with ``#`` are ignored.
2647 Lines beginning with ``#`` are ignored.
2648
2648
2649 The following sections denote available actions.
2649 The following sections denote available actions.
2650
2650
2651 raw
2651 raw
2652 ---
2652 ---
2653
2653
2654 Send raw data to the server.
2654 Send raw data to the server.
2655
2655
2656 The block payload contains the raw data to send as one atomic send
2656 The block payload contains the raw data to send as one atomic send
2657 operation. The data may not actually be delivered in a single system
2657 operation. The data may not actually be delivered in a single system
2658 call: it depends on the abilities of the transport being used.
2658 call: it depends on the abilities of the transport being used.
2659
2659
2660 Each line in the block is de-indented and concatenated. Then, that
2660 Each line in the block is de-indented and concatenated. Then, that
2661 value is evaluated as a Python b'' literal. This allows the use of
2661 value is evaluated as a Python b'' literal. This allows the use of
2662 backslash escaping, etc.
2662 backslash escaping, etc.
2663
2663
2664 raw+
2664 raw+
2665 ----
2665 ----
2666
2666
2667 Behaves like ``raw`` except flushes output afterwards.
2667 Behaves like ``raw`` except flushes output afterwards.
2668
2668
2669 command <X>
2669 command <X>
2670 -----------
2670 -----------
2671
2671
2672 Send a request to run a named command, whose name follows the ``command``
2672 Send a request to run a named command, whose name follows the ``command``
2673 string.
2673 string.
2674
2674
2675 Arguments to the command are defined as lines in this block. The format of
2675 Arguments to the command are defined as lines in this block. The format of
2676 each line is ``<key> <value>``. e.g.::
2676 each line is ``<key> <value>``. e.g.::
2677
2677
2678 command listkeys
2678 command listkeys
2679 namespace bookmarks
2679 namespace bookmarks
2680
2680
2681 Values are interpreted as Python b'' literals. This allows encoding
2681 Values are interpreted as Python b'' literals. This allows encoding
2682 special byte sequences via backslash escaping.
2682 special byte sequences via backslash escaping.
2683
2683
2684 The following arguments have special meaning:
2684 The following arguments have special meaning:
2685
2685
2686 ``PUSHFILE``
2686 ``PUSHFILE``
2687 When defined, the *push* mechanism of the peer will be used instead
2687 When defined, the *push* mechanism of the peer will be used instead
2688 of the static request-response mechanism and the content of the
2688 of the static request-response mechanism and the content of the
2689 file specified in the value of this argument will be sent as the
2689 file specified in the value of this argument will be sent as the
2690 command payload.
2690 command payload.
2691
2691
2692 This can be used to submit a local bundle file to the remote.
2692 This can be used to submit a local bundle file to the remote.
2693
2693
2694 batchbegin
2694 batchbegin
2695 ----------
2695 ----------
2696
2696
2697 Instruct the peer to begin a batched send.
2697 Instruct the peer to begin a batched send.
2698
2698
2699 All ``command`` blocks are queued for execution until the next
2699 All ``command`` blocks are queued for execution until the next
2700 ``batchsubmit`` block.
2700 ``batchsubmit`` block.
2701
2701
2702 batchsubmit
2702 batchsubmit
2703 -----------
2703 -----------
2704
2704
2705 Submit previously queued ``command`` blocks as a batch request.
2705 Submit previously queued ``command`` blocks as a batch request.
2706
2706
2707 This action MUST be paired with a ``batchbegin`` action.
2707 This action MUST be paired with a ``batchbegin`` action.
2708
2708
2709 httprequest <method> <path>
2709 httprequest <method> <path>
2710 ---------------------------
2710 ---------------------------
2711
2711
2712 (HTTP peer only)
2712 (HTTP peer only)
2713
2713
2714 Send an HTTP request to the peer.
2714 Send an HTTP request to the peer.
2715
2715
2716 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2716 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
2717
2717
2718 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2718 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
2719 headers to add to the request. e.g. ``Accept: foo``.
2719 headers to add to the request. e.g. ``Accept: foo``.
2720
2720
2721 The following arguments are special:
2721 The following arguments are special:
2722
2722
2723 ``BODYFILE``
2723 ``BODYFILE``
2724 The content of the file defined as the value to this argument will be
2724 The content of the file defined as the value to this argument will be
2725 transferred verbatim as the HTTP request body.
2725 transferred verbatim as the HTTP request body.
2726
2726
2727 ``frame <type> <flags> <payload>``
2727 ``frame <type> <flags> <payload>``
2728 Send a unified protocol frame as part of the request body.
2728 Send a unified protocol frame as part of the request body.
2729
2729
2730 All frames will be collected and sent as the body to the HTTP
2730 All frames will be collected and sent as the body to the HTTP
2731 request.
2731 request.
2732
2732
2733 close
2733 close
2734 -----
2734 -----
2735
2735
2736 Close the connection to the server.
2736 Close the connection to the server.
2737
2737
2738 flush
2738 flush
2739 -----
2739 -----
2740
2740
2741 Flush data written to the server.
2741 Flush data written to the server.
2742
2742
2743 readavailable
2743 readavailable
2744 -------------
2744 -------------
2745
2745
2746 Close the write end of the connection and read all available data from
2746 Close the write end of the connection and read all available data from
2747 the server.
2747 the server.
2748
2748
2749 If the connection to the server encompasses multiple pipes, we poll both
2749 If the connection to the server encompasses multiple pipes, we poll both
2750 pipes and read available data.
2750 pipes and read available data.
2751
2751
2752 readline
2752 readline
2753 --------
2753 --------
2754
2754
2755 Read a line of output from the server. If there are multiple output
2755 Read a line of output from the server. If there are multiple output
2756 pipes, reads only the main pipe.
2756 pipes, reads only the main pipe.
2757
2757
2758 ereadline
2758 ereadline
2759 ---------
2759 ---------
2760
2760
2761 Like ``readline``, but read from the stderr pipe, if available.
2761 Like ``readline``, but read from the stderr pipe, if available.
2762
2762
2763 read <X>
2763 read <X>
2764 --------
2764 --------
2765
2765
2766 ``read()`` N bytes from the server's main output pipe.
2766 ``read()`` N bytes from the server's main output pipe.
2767
2767
2768 eread <X>
2768 eread <X>
2769 ---------
2769 ---------
2770
2770
2771 ``read()`` N bytes from the server's stderr pipe, if available.
2771 ``read()`` N bytes from the server's stderr pipe, if available.
2772
2772
2773 Specifying Unified Frame-Based Protocol Frames
2773 Specifying Unified Frame-Based Protocol Frames
2774 ----------------------------------------------
2774 ----------------------------------------------
2775
2775
2776 It is possible to emit a *Unified Frame-Based Protocol* by using special
2776 It is possible to emit a *Unified Frame-Based Protocol* by using special
2777 syntax.
2777 syntax.
2778
2778
2779 A frame is composed as a type, flags, and payload. These can be parsed
2779 A frame is composed as a type, flags, and payload. These can be parsed
2780 from a string of the form:
2780 from a string of the form:
2781
2781
2782 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2782 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
2783
2783
2784 ``request-id`` and ``stream-id`` are integers defining the request and
2784 ``request-id`` and ``stream-id`` are integers defining the request and
2785 stream identifiers.
2785 stream identifiers.
2786
2786
2787 ``type`` can be an integer value for the frame type or the string name
2787 ``type`` can be an integer value for the frame type or the string name
2788 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2788 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
2789 ``command-name``.
2789 ``command-name``.
2790
2790
2791 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2791 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
2792 components. Each component (and there can be just one) can be an integer
2792 components. Each component (and there can be just one) can be an integer
2793 or a flag name for stream flags or frame flags, respectively. Values are
2793 or a flag name for stream flags or frame flags, respectively. Values are
2794 resolved to integers and then bitwise OR'd together.
2794 resolved to integers and then bitwise OR'd together.
2795
2795
2796 ``payload`` is is evaluated as a Python byte string literal.
2796 ``payload`` represents the raw frame payload. If it begins with
2797 ``cbor:``, the following string is evaluated as Python code and the
2798 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
2799 as a Python byte string literal.
2797 """
2800 """
2798 opts = pycompat.byteskwargs(opts)
2801 opts = pycompat.byteskwargs(opts)
2799
2802
2800 if opts['localssh'] and not repo:
2803 if opts['localssh'] and not repo:
2801 raise error.Abort(_('--localssh requires a repository'))
2804 raise error.Abort(_('--localssh requires a repository'))
2802
2805
2803 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2806 if opts['peer'] and opts['peer'] not in ('raw', 'ssh1', 'ssh2'):
2804 raise error.Abort(_('invalid value for --peer'),
2807 raise error.Abort(_('invalid value for --peer'),
2805 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2808 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
2806
2809
2807 if path and opts['localssh']:
2810 if path and opts['localssh']:
2808 raise error.Abort(_('cannot specify --localssh with an explicit '
2811 raise error.Abort(_('cannot specify --localssh with an explicit '
2809 'path'))
2812 'path'))
2810
2813
2811 if ui.interactive():
2814 if ui.interactive():
2812 ui.write(_('(waiting for commands on stdin)\n'))
2815 ui.write(_('(waiting for commands on stdin)\n'))
2813
2816
2814 blocks = list(_parsewirelangblocks(ui.fin))
2817 blocks = list(_parsewirelangblocks(ui.fin))
2815
2818
2816 proc = None
2819 proc = None
2817 stdin = None
2820 stdin = None
2818 stdout = None
2821 stdout = None
2819 stderr = None
2822 stderr = None
2820 opener = None
2823 opener = None
2821
2824
2822 if opts['localssh']:
2825 if opts['localssh']:
2823 # We start the SSH server in its own process so there is process
2826 # We start the SSH server in its own process so there is process
2824 # separation. This prevents a whole class of potential bugs around
2827 # separation. This prevents a whole class of potential bugs around
2825 # shared state from interfering with server operation.
2828 # shared state from interfering with server operation.
2826 args = procutil.hgcmd() + [
2829 args = procutil.hgcmd() + [
2827 '-R', repo.root,
2830 '-R', repo.root,
2828 'debugserve', '--sshstdio',
2831 'debugserve', '--sshstdio',
2829 ]
2832 ]
2830 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2833 proc = subprocess.Popen(args, stdin=subprocess.PIPE,
2831 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2834 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
2832 bufsize=0)
2835 bufsize=0)
2833
2836
2834 stdin = proc.stdin
2837 stdin = proc.stdin
2835 stdout = proc.stdout
2838 stdout = proc.stdout
2836 stderr = proc.stderr
2839 stderr = proc.stderr
2837
2840
2838 # We turn the pipes into observers so we can log I/O.
2841 # We turn the pipes into observers so we can log I/O.
2839 if ui.verbose or opts['peer'] == 'raw':
2842 if ui.verbose or opts['peer'] == 'raw':
2840 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2843 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
2841 logdata=True)
2844 logdata=True)
2842 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2845 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
2843 logdata=True)
2846 logdata=True)
2844 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2847 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
2845 logdata=True)
2848 logdata=True)
2846
2849
2847 # --localssh also implies the peer connection settings.
2850 # --localssh also implies the peer connection settings.
2848
2851
2849 url = 'ssh://localserver'
2852 url = 'ssh://localserver'
2850 autoreadstderr = not opts['noreadstderr']
2853 autoreadstderr = not opts['noreadstderr']
2851
2854
2852 if opts['peer'] == 'ssh1':
2855 if opts['peer'] == 'ssh1':
2853 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2856 ui.write(_('creating ssh peer for wire protocol version 1\n'))
2854 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2857 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
2855 None, autoreadstderr=autoreadstderr)
2858 None, autoreadstderr=autoreadstderr)
2856 elif opts['peer'] == 'ssh2':
2859 elif opts['peer'] == 'ssh2':
2857 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2860 ui.write(_('creating ssh peer for wire protocol version 2\n'))
2858 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2861 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
2859 None, autoreadstderr=autoreadstderr)
2862 None, autoreadstderr=autoreadstderr)
2860 elif opts['peer'] == 'raw':
2863 elif opts['peer'] == 'raw':
2861 ui.write(_('using raw connection to peer\n'))
2864 ui.write(_('using raw connection to peer\n'))
2862 peer = None
2865 peer = None
2863 else:
2866 else:
2864 ui.write(_('creating ssh peer from handshake results\n'))
2867 ui.write(_('creating ssh peer from handshake results\n'))
2865 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2868 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
2866 autoreadstderr=autoreadstderr)
2869 autoreadstderr=autoreadstderr)
2867
2870
2868 elif path:
2871 elif path:
2869 # We bypass hg.peer() so we can proxy the sockets.
2872 # We bypass hg.peer() so we can proxy the sockets.
2870 # TODO consider not doing this because we skip
2873 # TODO consider not doing this because we skip
2871 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2874 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
2872 u = util.url(path)
2875 u = util.url(path)
2873 if u.scheme != 'http':
2876 if u.scheme != 'http':
2874 raise error.Abort(_('only http:// paths are currently supported'))
2877 raise error.Abort(_('only http:// paths are currently supported'))
2875
2878
2876 url, authinfo = u.authinfo()
2879 url, authinfo = u.authinfo()
2877 openerargs = {}
2880 openerargs = {}
2878
2881
2879 # Turn pipes/sockets into observers so we can log I/O.
2882 # Turn pipes/sockets into observers so we can log I/O.
2880 if ui.verbose:
2883 if ui.verbose:
2881 openerargs = {
2884 openerargs = {
2882 r'loggingfh': ui,
2885 r'loggingfh': ui,
2883 r'loggingname': b's',
2886 r'loggingname': b's',
2884 r'loggingopts': {
2887 r'loggingopts': {
2885 r'logdata': True,
2888 r'logdata': True,
2886 r'logdataapis': False,
2889 r'logdataapis': False,
2887 },
2890 },
2888 }
2891 }
2889
2892
2890 if ui.debugflag:
2893 if ui.debugflag:
2891 openerargs[r'loggingopts'][r'logdataapis'] = True
2894 openerargs[r'loggingopts'][r'logdataapis'] = True
2892
2895
2893 # Don't send default headers when in raw mode. This allows us to
2896 # Don't send default headers when in raw mode. This allows us to
2894 # bypass most of the behavior of our URL handling code so we can
2897 # bypass most of the behavior of our URL handling code so we can
2895 # have near complete control over what's sent on the wire.
2898 # have near complete control over what's sent on the wire.
2896 if opts['peer'] == 'raw':
2899 if opts['peer'] == 'raw':
2897 openerargs[r'sendaccept'] = False
2900 openerargs[r'sendaccept'] = False
2898
2901
2899 opener = urlmod.opener(ui, authinfo, **openerargs)
2902 opener = urlmod.opener(ui, authinfo, **openerargs)
2900
2903
2901 if opts['peer'] == 'raw':
2904 if opts['peer'] == 'raw':
2902 ui.write(_('using raw connection to peer\n'))
2905 ui.write(_('using raw connection to peer\n'))
2903 peer = None
2906 peer = None
2904 elif opts['peer']:
2907 elif opts['peer']:
2905 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2908 raise error.Abort(_('--peer %s not supported with HTTP peers') %
2906 opts['peer'])
2909 opts['peer'])
2907 else:
2910 else:
2908 peer = httppeer.httppeer(ui, path, url, opener)
2911 peer = httppeer.httppeer(ui, path, url, opener)
2909 peer._fetchcaps()
2912 peer._fetchcaps()
2910
2913
2911 # We /could/ populate stdin/stdout with sock.makefile()...
2914 # We /could/ populate stdin/stdout with sock.makefile()...
2912 else:
2915 else:
2913 raise error.Abort(_('unsupported connection configuration'))
2916 raise error.Abort(_('unsupported connection configuration'))
2914
2917
2915 batchedcommands = None
2918 batchedcommands = None
2916
2919
2917 # Now perform actions based on the parsed wire language instructions.
2920 # Now perform actions based on the parsed wire language instructions.
2918 for action, lines in blocks:
2921 for action, lines in blocks:
2919 if action in ('raw', 'raw+'):
2922 if action in ('raw', 'raw+'):
2920 if not stdin:
2923 if not stdin:
2921 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2924 raise error.Abort(_('cannot call raw/raw+ on this peer'))
2922
2925
2923 # Concatenate the data together.
2926 # Concatenate the data together.
2924 data = ''.join(l.lstrip() for l in lines)
2927 data = ''.join(l.lstrip() for l in lines)
2925 data = stringutil.unescapestr(data)
2928 data = stringutil.unescapestr(data)
2926 stdin.write(data)
2929 stdin.write(data)
2927
2930
2928 if action == 'raw+':
2931 if action == 'raw+':
2929 stdin.flush()
2932 stdin.flush()
2930 elif action == 'flush':
2933 elif action == 'flush':
2931 if not stdin:
2934 if not stdin:
2932 raise error.Abort(_('cannot call flush on this peer'))
2935 raise error.Abort(_('cannot call flush on this peer'))
2933 stdin.flush()
2936 stdin.flush()
2934 elif action.startswith('command'):
2937 elif action.startswith('command'):
2935 if not peer:
2938 if not peer:
2936 raise error.Abort(_('cannot send commands unless peer instance '
2939 raise error.Abort(_('cannot send commands unless peer instance '
2937 'is available'))
2940 'is available'))
2938
2941
2939 command = action.split(' ', 1)[1]
2942 command = action.split(' ', 1)[1]
2940
2943
2941 args = {}
2944 args = {}
2942 for line in lines:
2945 for line in lines:
2943 # We need to allow empty values.
2946 # We need to allow empty values.
2944 fields = line.lstrip().split(' ', 1)
2947 fields = line.lstrip().split(' ', 1)
2945 if len(fields) == 1:
2948 if len(fields) == 1:
2946 key = fields[0]
2949 key = fields[0]
2947 value = ''
2950 value = ''
2948 else:
2951 else:
2949 key, value = fields
2952 key, value = fields
2950
2953
2951 args[key] = stringutil.unescapestr(value)
2954 args[key] = stringutil.unescapestr(value)
2952
2955
2953 if batchedcommands is not None:
2956 if batchedcommands is not None:
2954 batchedcommands.append((command, args))
2957 batchedcommands.append((command, args))
2955 continue
2958 continue
2956
2959
2957 ui.status(_('sending %s command\n') % command)
2960 ui.status(_('sending %s command\n') % command)
2958
2961
2959 if 'PUSHFILE' in args:
2962 if 'PUSHFILE' in args:
2960 with open(args['PUSHFILE'], r'rb') as fh:
2963 with open(args['PUSHFILE'], r'rb') as fh:
2961 del args['PUSHFILE']
2964 del args['PUSHFILE']
2962 res, output = peer._callpush(command, fh,
2965 res, output = peer._callpush(command, fh,
2963 **pycompat.strkwargs(args))
2966 **pycompat.strkwargs(args))
2964 ui.status(_('result: %s\n') % stringutil.escapedata(res))
2967 ui.status(_('result: %s\n') % stringutil.escapedata(res))
2965 ui.status(_('remote output: %s\n') %
2968 ui.status(_('remote output: %s\n') %
2966 stringutil.escapedata(output))
2969 stringutil.escapedata(output))
2967 else:
2970 else:
2968 res = peer._call(command, **pycompat.strkwargs(args))
2971 res = peer._call(command, **pycompat.strkwargs(args))
2969 ui.status(_('response: %s\n') % stringutil.escapedata(res))
2972 ui.status(_('response: %s\n') % stringutil.escapedata(res))
2970
2973
2971 elif action == 'batchbegin':
2974 elif action == 'batchbegin':
2972 if batchedcommands is not None:
2975 if batchedcommands is not None:
2973 raise error.Abort(_('nested batchbegin not allowed'))
2976 raise error.Abort(_('nested batchbegin not allowed'))
2974
2977
2975 batchedcommands = []
2978 batchedcommands = []
2976 elif action == 'batchsubmit':
2979 elif action == 'batchsubmit':
2977 # There is a batching API we could go through. But it would be
2980 # There is a batching API we could go through. But it would be
2978 # difficult to normalize requests into function calls. It is easier
2981 # difficult to normalize requests into function calls. It is easier
2979 # to bypass this layer and normalize to commands + args.
2982 # to bypass this layer and normalize to commands + args.
2980 ui.status(_('sending batch with %d sub-commands\n') %
2983 ui.status(_('sending batch with %d sub-commands\n') %
2981 len(batchedcommands))
2984 len(batchedcommands))
2982 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2985 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
2983 ui.status(_('response #%d: %s\n') %
2986 ui.status(_('response #%d: %s\n') %
2984 (i, stringutil.escapedata(chunk)))
2987 (i, stringutil.escapedata(chunk)))
2985
2988
2986 batchedcommands = None
2989 batchedcommands = None
2987
2990
2988 elif action.startswith('httprequest '):
2991 elif action.startswith('httprequest '):
2989 if not opener:
2992 if not opener:
2990 raise error.Abort(_('cannot use httprequest without an HTTP '
2993 raise error.Abort(_('cannot use httprequest without an HTTP '
2991 'peer'))
2994 'peer'))
2992
2995
2993 request = action.split(' ', 2)
2996 request = action.split(' ', 2)
2994 if len(request) != 3:
2997 if len(request) != 3:
2995 raise error.Abort(_('invalid httprequest: expected format is '
2998 raise error.Abort(_('invalid httprequest: expected format is '
2996 '"httprequest <method> <path>'))
2999 '"httprequest <method> <path>'))
2997
3000
2998 method, httppath = request[1:]
3001 method, httppath = request[1:]
2999 headers = {}
3002 headers = {}
3000 body = None
3003 body = None
3001 frames = []
3004 frames = []
3002 for line in lines:
3005 for line in lines:
3003 line = line.lstrip()
3006 line = line.lstrip()
3004 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3007 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3005 if m:
3008 if m:
3006 headers[m.group(1)] = m.group(2)
3009 headers[m.group(1)] = m.group(2)
3007 continue
3010 continue
3008
3011
3009 if line.startswith(b'BODYFILE '):
3012 if line.startswith(b'BODYFILE '):
3010 with open(line.split(b' ', 1), 'rb') as fh:
3013 with open(line.split(b' ', 1), 'rb') as fh:
3011 body = fh.read()
3014 body = fh.read()
3012 elif line.startswith(b'frame '):
3015 elif line.startswith(b'frame '):
3013 frame = wireprotoframing.makeframefromhumanstring(
3016 frame = wireprotoframing.makeframefromhumanstring(
3014 line[len(b'frame '):])
3017 line[len(b'frame '):])
3015
3018
3016 frames.append(frame)
3019 frames.append(frame)
3017 else:
3020 else:
3018 raise error.Abort(_('unknown argument to httprequest: %s') %
3021 raise error.Abort(_('unknown argument to httprequest: %s') %
3019 line)
3022 line)
3020
3023
3021 url = path + httppath
3024 url = path + httppath
3022
3025
3023 if frames:
3026 if frames:
3024 body = b''.join(bytes(f) for f in frames)
3027 body = b''.join(bytes(f) for f in frames)
3025
3028
3026 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3029 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3027
3030
3028 # urllib.Request insists on using has_data() as a proxy for
3031 # urllib.Request insists on using has_data() as a proxy for
3029 # determining the request method. Override that to use our
3032 # determining the request method. Override that to use our
3030 # explicitly requested method.
3033 # explicitly requested method.
3031 req.get_method = lambda: method
3034 req.get_method = lambda: method
3032
3035
3033 try:
3036 try:
3034 opener.open(req).read()
3037 opener.open(req).read()
3035 except util.urlerr.urlerror as e:
3038 except util.urlerr.urlerror as e:
3036 e.read()
3039 e.read()
3037
3040
3038 elif action == 'close':
3041 elif action == 'close':
3039 peer.close()
3042 peer.close()
3040 elif action == 'readavailable':
3043 elif action == 'readavailable':
3041 if not stdout or not stderr:
3044 if not stdout or not stderr:
3042 raise error.Abort(_('readavailable not available on this peer'))
3045 raise error.Abort(_('readavailable not available on this peer'))
3043
3046
3044 stdin.close()
3047 stdin.close()
3045 stdout.read()
3048 stdout.read()
3046 stderr.read()
3049 stderr.read()
3047
3050
3048 elif action == 'readline':
3051 elif action == 'readline':
3049 if not stdout:
3052 if not stdout:
3050 raise error.Abort(_('readline not available on this peer'))
3053 raise error.Abort(_('readline not available on this peer'))
3051 stdout.readline()
3054 stdout.readline()
3052 elif action == 'ereadline':
3055 elif action == 'ereadline':
3053 if not stderr:
3056 if not stderr:
3054 raise error.Abort(_('ereadline not available on this peer'))
3057 raise error.Abort(_('ereadline not available on this peer'))
3055 stderr.readline()
3058 stderr.readline()
3056 elif action.startswith('read '):
3059 elif action.startswith('read '):
3057 count = int(action.split(' ', 1)[1])
3060 count = int(action.split(' ', 1)[1])
3058 if not stdout:
3061 if not stdout:
3059 raise error.Abort(_('read not available on this peer'))
3062 raise error.Abort(_('read not available on this peer'))
3060 stdout.read(count)
3063 stdout.read(count)
3061 elif action.startswith('eread '):
3064 elif action.startswith('eread '):
3062 count = int(action.split(' ', 1)[1])
3065 count = int(action.split(' ', 1)[1])
3063 if not stderr:
3066 if not stderr:
3064 raise error.Abort(_('eread not available on this peer'))
3067 raise error.Abort(_('eread not available on this peer'))
3065 stderr.read(count)
3068 stderr.read(count)
3066 else:
3069 else:
3067 raise error.Abort(_('unknown action: %s') % action)
3070 raise error.Abort(_('unknown action: %s') % action)
3068
3071
3069 if batchedcommands is not None:
3072 if batchedcommands is not None:
3070 raise error.Abort(_('unclosed "batchbegin" request'))
3073 raise error.Abort(_('unclosed "batchbegin" request'))
3071
3074
3072 if peer:
3075 if peer:
3073 peer.close()
3076 peer.close()
3074
3077
3075 if proc:
3078 if proc:
3076 proc.kill()
3079 proc.kill()
@@ -1,499 +1,526 b''
1 # stringutil.py - utility for generic string formatting, parsing, etc.
1 # stringutil.py - utility for generic string formatting, parsing, etc.
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import __future__
12 import codecs
13 import codecs
13 import re as remod
14 import re as remod
14 import textwrap
15 import textwrap
15
16
16 from ..i18n import _
17 from ..i18n import _
17 from ..thirdparty import attr
18 from ..thirdparty import attr
18
19
19 from .. import (
20 from .. import (
20 encoding,
21 encoding,
21 error,
22 error,
22 pycompat,
23 pycompat,
23 )
24 )
24
25
25 _DATA_ESCAPE_MAP = {pycompat.bytechr(i): br'\x%02x' % i for i in range(256)}
26 _DATA_ESCAPE_MAP = {pycompat.bytechr(i): br'\x%02x' % i for i in range(256)}
26 _DATA_ESCAPE_MAP.update({
27 _DATA_ESCAPE_MAP.update({
27 b'\\': b'\\\\',
28 b'\\': b'\\\\',
28 b'\r': br'\r',
29 b'\r': br'\r',
29 b'\n': br'\n',
30 b'\n': br'\n',
30 })
31 })
31 _DATA_ESCAPE_RE = remod.compile(br'[\x00-\x08\x0a-\x1f\\\x7f-\xff]')
32 _DATA_ESCAPE_RE = remod.compile(br'[\x00-\x08\x0a-\x1f\\\x7f-\xff]')
32
33
33 def escapedata(s):
34 def escapedata(s):
34 if isinstance(s, bytearray):
35 if isinstance(s, bytearray):
35 s = bytes(s)
36 s = bytes(s)
36
37
37 return _DATA_ESCAPE_RE.sub(lambda m: _DATA_ESCAPE_MAP[m.group(0)], s)
38 return _DATA_ESCAPE_RE.sub(lambda m: _DATA_ESCAPE_MAP[m.group(0)], s)
38
39
39 def binary(s):
40 def binary(s):
40 """return true if a string is binary data"""
41 """return true if a string is binary data"""
41 return bool(s and '\0' in s)
42 return bool(s and '\0' in s)
42
43
43 def stringmatcher(pattern, casesensitive=True):
44 def stringmatcher(pattern, casesensitive=True):
44 """
45 """
45 accepts a string, possibly starting with 're:' or 'literal:' prefix.
46 accepts a string, possibly starting with 're:' or 'literal:' prefix.
46 returns the matcher name, pattern, and matcher function.
47 returns the matcher name, pattern, and matcher function.
47 missing or unknown prefixes are treated as literal matches.
48 missing or unknown prefixes are treated as literal matches.
48
49
49 helper for tests:
50 helper for tests:
50 >>> def test(pattern, *tests):
51 >>> def test(pattern, *tests):
51 ... kind, pattern, matcher = stringmatcher(pattern)
52 ... kind, pattern, matcher = stringmatcher(pattern)
52 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
53 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
53 >>> def itest(pattern, *tests):
54 >>> def itest(pattern, *tests):
54 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
55 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
55 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
56 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
56
57
57 exact matching (no prefix):
58 exact matching (no prefix):
58 >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
59 >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
59 ('literal', 'abcdefg', [False, False, True])
60 ('literal', 'abcdefg', [False, False, True])
60
61
61 regex matching ('re:' prefix)
62 regex matching ('re:' prefix)
62 >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
63 >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
63 ('re', 'a.+b', [False, False, True])
64 ('re', 'a.+b', [False, False, True])
64
65
65 force exact matches ('literal:' prefix)
66 force exact matches ('literal:' prefix)
66 >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
67 >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
67 ('literal', 're:foobar', [False, True])
68 ('literal', 're:foobar', [False, True])
68
69
69 unknown prefixes are ignored and treated as literals
70 unknown prefixes are ignored and treated as literals
70 >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
71 >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
71 ('literal', 'foo:bar', [False, False, True])
72 ('literal', 'foo:bar', [False, False, True])
72
73
73 case insensitive regex matches
74 case insensitive regex matches
74 >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
75 >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
75 ('re', 'A.+b', [False, False, True])
76 ('re', 'A.+b', [False, False, True])
76
77
77 case insensitive literal matches
78 case insensitive literal matches
78 >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
79 >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
79 ('literal', 'ABCDEFG', [False, False, True])
80 ('literal', 'ABCDEFG', [False, False, True])
80 """
81 """
81 if pattern.startswith('re:'):
82 if pattern.startswith('re:'):
82 pattern = pattern[3:]
83 pattern = pattern[3:]
83 try:
84 try:
84 flags = 0
85 flags = 0
85 if not casesensitive:
86 if not casesensitive:
86 flags = remod.I
87 flags = remod.I
87 regex = remod.compile(pattern, flags)
88 regex = remod.compile(pattern, flags)
88 except remod.error as e:
89 except remod.error as e:
89 raise error.ParseError(_('invalid regular expression: %s')
90 raise error.ParseError(_('invalid regular expression: %s')
90 % e)
91 % e)
91 return 're', pattern, regex.search
92 return 're', pattern, regex.search
92 elif pattern.startswith('literal:'):
93 elif pattern.startswith('literal:'):
93 pattern = pattern[8:]
94 pattern = pattern[8:]
94
95
95 match = pattern.__eq__
96 match = pattern.__eq__
96
97
97 if not casesensitive:
98 if not casesensitive:
98 ipat = encoding.lower(pattern)
99 ipat = encoding.lower(pattern)
99 match = lambda s: ipat == encoding.lower(s)
100 match = lambda s: ipat == encoding.lower(s)
100 return 'literal', pattern, match
101 return 'literal', pattern, match
101
102
102 def shortuser(user):
103 def shortuser(user):
103 """Return a short representation of a user name or email address."""
104 """Return a short representation of a user name or email address."""
104 f = user.find('@')
105 f = user.find('@')
105 if f >= 0:
106 if f >= 0:
106 user = user[:f]
107 user = user[:f]
107 f = user.find('<')
108 f = user.find('<')
108 if f >= 0:
109 if f >= 0:
109 user = user[f + 1:]
110 user = user[f + 1:]
110 f = user.find(' ')
111 f = user.find(' ')
111 if f >= 0:
112 if f >= 0:
112 user = user[:f]
113 user = user[:f]
113 f = user.find('.')
114 f = user.find('.')
114 if f >= 0:
115 if f >= 0:
115 user = user[:f]
116 user = user[:f]
116 return user
117 return user
117
118
118 def emailuser(user):
119 def emailuser(user):
119 """Return the user portion of an email address."""
120 """Return the user portion of an email address."""
120 f = user.find('@')
121 f = user.find('@')
121 if f >= 0:
122 if f >= 0:
122 user = user[:f]
123 user = user[:f]
123 f = user.find('<')
124 f = user.find('<')
124 if f >= 0:
125 if f >= 0:
125 user = user[f + 1:]
126 user = user[f + 1:]
126 return user
127 return user
127
128
128 def email(author):
129 def email(author):
129 '''get email of author.'''
130 '''get email of author.'''
130 r = author.find('>')
131 r = author.find('>')
131 if r == -1:
132 if r == -1:
132 r = None
133 r = None
133 return author[author.find('<') + 1:r]
134 return author[author.find('<') + 1:r]
134
135
135 def person(author):
136 def person(author):
136 """Returns the name before an email address,
137 """Returns the name before an email address,
137 interpreting it as per RFC 5322
138 interpreting it as per RFC 5322
138
139
139 >>> person(b'foo@bar')
140 >>> person(b'foo@bar')
140 'foo'
141 'foo'
141 >>> person(b'Foo Bar <foo@bar>')
142 >>> person(b'Foo Bar <foo@bar>')
142 'Foo Bar'
143 'Foo Bar'
143 >>> person(b'"Foo Bar" <foo@bar>')
144 >>> person(b'"Foo Bar" <foo@bar>')
144 'Foo Bar'
145 'Foo Bar'
145 >>> person(b'"Foo \"buz\" Bar" <foo@bar>')
146 >>> person(b'"Foo \"buz\" Bar" <foo@bar>')
146 'Foo "buz" Bar'
147 'Foo "buz" Bar'
147 >>> # The following are invalid, but do exist in real-life
148 >>> # The following are invalid, but do exist in real-life
148 ...
149 ...
149 >>> person(b'Foo "buz" Bar <foo@bar>')
150 >>> person(b'Foo "buz" Bar <foo@bar>')
150 'Foo "buz" Bar'
151 'Foo "buz" Bar'
151 >>> person(b'"Foo Bar <foo@bar>')
152 >>> person(b'"Foo Bar <foo@bar>')
152 'Foo Bar'
153 'Foo Bar'
153 """
154 """
154 if '@' not in author:
155 if '@' not in author:
155 return author
156 return author
156 f = author.find('<')
157 f = author.find('<')
157 if f != -1:
158 if f != -1:
158 return author[:f].strip(' "').replace('\\"', '"')
159 return author[:f].strip(' "').replace('\\"', '"')
159 f = author.find('@')
160 f = author.find('@')
160 return author[:f].replace('.', ' ')
161 return author[:f].replace('.', ' ')
161
162
162 @attr.s(hash=True)
163 @attr.s(hash=True)
163 class mailmapping(object):
164 class mailmapping(object):
164 '''Represents a username/email key or value in
165 '''Represents a username/email key or value in
165 a mailmap file'''
166 a mailmap file'''
166 email = attr.ib()
167 email = attr.ib()
167 name = attr.ib(default=None)
168 name = attr.ib(default=None)
168
169
169 def _ismailmaplineinvalid(names, emails):
170 def _ismailmaplineinvalid(names, emails):
170 '''Returns True if the parsed names and emails
171 '''Returns True if the parsed names and emails
171 in a mailmap entry are invalid.
172 in a mailmap entry are invalid.
172
173
173 >>> # No names or emails fails
174 >>> # No names or emails fails
174 >>> names, emails = [], []
175 >>> names, emails = [], []
175 >>> _ismailmaplineinvalid(names, emails)
176 >>> _ismailmaplineinvalid(names, emails)
176 True
177 True
177 >>> # Only one email fails
178 >>> # Only one email fails
178 >>> emails = [b'email@email.com']
179 >>> emails = [b'email@email.com']
179 >>> _ismailmaplineinvalid(names, emails)
180 >>> _ismailmaplineinvalid(names, emails)
180 True
181 True
181 >>> # One email and one name passes
182 >>> # One email and one name passes
182 >>> names = [b'Test Name']
183 >>> names = [b'Test Name']
183 >>> _ismailmaplineinvalid(names, emails)
184 >>> _ismailmaplineinvalid(names, emails)
184 False
185 False
185 >>> # No names but two emails passes
186 >>> # No names but two emails passes
186 >>> names = []
187 >>> names = []
187 >>> emails = [b'proper@email.com', b'commit@email.com']
188 >>> emails = [b'proper@email.com', b'commit@email.com']
188 >>> _ismailmaplineinvalid(names, emails)
189 >>> _ismailmaplineinvalid(names, emails)
189 False
190 False
190 '''
191 '''
191 return not emails or not names and len(emails) < 2
192 return not emails or not names and len(emails) < 2
192
193
193 def parsemailmap(mailmapcontent):
194 def parsemailmap(mailmapcontent):
194 """Parses data in the .mailmap format
195 """Parses data in the .mailmap format
195
196
196 >>> mmdata = b"\\n".join([
197 >>> mmdata = b"\\n".join([
197 ... b'# Comment',
198 ... b'# Comment',
198 ... b'Name <commit1@email.xx>',
199 ... b'Name <commit1@email.xx>',
199 ... b'<name@email.xx> <commit2@email.xx>',
200 ... b'<name@email.xx> <commit2@email.xx>',
200 ... b'Name <proper@email.xx> <commit3@email.xx>',
201 ... b'Name <proper@email.xx> <commit3@email.xx>',
201 ... b'Name <proper@email.xx> Commit <commit4@email.xx>',
202 ... b'Name <proper@email.xx> Commit <commit4@email.xx>',
202 ... ])
203 ... ])
203 >>> mm = parsemailmap(mmdata)
204 >>> mm = parsemailmap(mmdata)
204 >>> for key in sorted(mm.keys()):
205 >>> for key in sorted(mm.keys()):
205 ... print(key)
206 ... print(key)
206 mailmapping(email='commit1@email.xx', name=None)
207 mailmapping(email='commit1@email.xx', name=None)
207 mailmapping(email='commit2@email.xx', name=None)
208 mailmapping(email='commit2@email.xx', name=None)
208 mailmapping(email='commit3@email.xx', name=None)
209 mailmapping(email='commit3@email.xx', name=None)
209 mailmapping(email='commit4@email.xx', name='Commit')
210 mailmapping(email='commit4@email.xx', name='Commit')
210 >>> for val in sorted(mm.values()):
211 >>> for val in sorted(mm.values()):
211 ... print(val)
212 ... print(val)
212 mailmapping(email='commit1@email.xx', name='Name')
213 mailmapping(email='commit1@email.xx', name='Name')
213 mailmapping(email='name@email.xx', name=None)
214 mailmapping(email='name@email.xx', name=None)
214 mailmapping(email='proper@email.xx', name='Name')
215 mailmapping(email='proper@email.xx', name='Name')
215 mailmapping(email='proper@email.xx', name='Name')
216 mailmapping(email='proper@email.xx', name='Name')
216 """
217 """
217 mailmap = {}
218 mailmap = {}
218
219
219 if mailmapcontent is None:
220 if mailmapcontent is None:
220 return mailmap
221 return mailmap
221
222
222 for line in mailmapcontent.splitlines():
223 for line in mailmapcontent.splitlines():
223
224
224 # Don't bother checking the line if it is a comment or
225 # Don't bother checking the line if it is a comment or
225 # is an improperly formed author field
226 # is an improperly formed author field
226 if line.lstrip().startswith('#'):
227 if line.lstrip().startswith('#'):
227 continue
228 continue
228
229
229 # names, emails hold the parsed emails and names for each line
230 # names, emails hold the parsed emails and names for each line
230 # name_builder holds the words in a persons name
231 # name_builder holds the words in a persons name
231 names, emails = [], []
232 names, emails = [], []
232 namebuilder = []
233 namebuilder = []
233
234
234 for element in line.split():
235 for element in line.split():
235 if element.startswith('#'):
236 if element.startswith('#'):
236 # If we reach a comment in the mailmap file, move on
237 # If we reach a comment in the mailmap file, move on
237 break
238 break
238
239
239 elif element.startswith('<') and element.endswith('>'):
240 elif element.startswith('<') and element.endswith('>'):
240 # We have found an email.
241 # We have found an email.
241 # Parse it, and finalize any names from earlier
242 # Parse it, and finalize any names from earlier
242 emails.append(element[1:-1]) # Slice off the "<>"
243 emails.append(element[1:-1]) # Slice off the "<>"
243
244
244 if namebuilder:
245 if namebuilder:
245 names.append(' '.join(namebuilder))
246 names.append(' '.join(namebuilder))
246 namebuilder = []
247 namebuilder = []
247
248
248 # Break if we have found a second email, any other
249 # Break if we have found a second email, any other
249 # data does not fit the spec for .mailmap
250 # data does not fit the spec for .mailmap
250 if len(emails) > 1:
251 if len(emails) > 1:
251 break
252 break
252
253
253 else:
254 else:
254 # We have found another word in the committers name
255 # We have found another word in the committers name
255 namebuilder.append(element)
256 namebuilder.append(element)
256
257
257 # Check to see if we have parsed the line into a valid form
258 # Check to see if we have parsed the line into a valid form
258 # We require at least one email, and either at least one
259 # We require at least one email, and either at least one
259 # name or a second email
260 # name or a second email
260 if _ismailmaplineinvalid(names, emails):
261 if _ismailmaplineinvalid(names, emails):
261 continue
262 continue
262
263
263 mailmapkey = mailmapping(
264 mailmapkey = mailmapping(
264 email=emails[-1],
265 email=emails[-1],
265 name=names[-1] if len(names) == 2 else None,
266 name=names[-1] if len(names) == 2 else None,
266 )
267 )
267
268
268 mailmap[mailmapkey] = mailmapping(
269 mailmap[mailmapkey] = mailmapping(
269 email=emails[0],
270 email=emails[0],
270 name=names[0] if names else None,
271 name=names[0] if names else None,
271 )
272 )
272
273
273 return mailmap
274 return mailmap
274
275
275 def mapname(mailmap, author):
276 def mapname(mailmap, author):
276 """Returns the author field according to the mailmap cache, or
277 """Returns the author field according to the mailmap cache, or
277 the original author field.
278 the original author field.
278
279
279 >>> mmdata = b"\\n".join([
280 >>> mmdata = b"\\n".join([
280 ... b'# Comment',
281 ... b'# Comment',
281 ... b'Name <commit1@email.xx>',
282 ... b'Name <commit1@email.xx>',
282 ... b'<name@email.xx> <commit2@email.xx>',
283 ... b'<name@email.xx> <commit2@email.xx>',
283 ... b'Name <proper@email.xx> <commit3@email.xx>',
284 ... b'Name <proper@email.xx> <commit3@email.xx>',
284 ... b'Name <proper@email.xx> Commit <commit4@email.xx>',
285 ... b'Name <proper@email.xx> Commit <commit4@email.xx>',
285 ... ])
286 ... ])
286 >>> m = parsemailmap(mmdata)
287 >>> m = parsemailmap(mmdata)
287 >>> mapname(m, b'Commit <commit1@email.xx>')
288 >>> mapname(m, b'Commit <commit1@email.xx>')
288 'Name <commit1@email.xx>'
289 'Name <commit1@email.xx>'
289 >>> mapname(m, b'Name <commit2@email.xx>')
290 >>> mapname(m, b'Name <commit2@email.xx>')
290 'Name <name@email.xx>'
291 'Name <name@email.xx>'
291 >>> mapname(m, b'Commit <commit3@email.xx>')
292 >>> mapname(m, b'Commit <commit3@email.xx>')
292 'Name <proper@email.xx>'
293 'Name <proper@email.xx>'
293 >>> mapname(m, b'Commit <commit4@email.xx>')
294 >>> mapname(m, b'Commit <commit4@email.xx>')
294 'Name <proper@email.xx>'
295 'Name <proper@email.xx>'
295 >>> mapname(m, b'Unknown Name <unknown@email.com>')
296 >>> mapname(m, b'Unknown Name <unknown@email.com>')
296 'Unknown Name <unknown@email.com>'
297 'Unknown Name <unknown@email.com>'
297 """
298 """
298 # If the author field coming in isn't in the correct format,
299 # If the author field coming in isn't in the correct format,
299 # or the mailmap is empty just return the original author field
300 # or the mailmap is empty just return the original author field
300 if not isauthorwellformed(author) or not mailmap:
301 if not isauthorwellformed(author) or not mailmap:
301 return author
302 return author
302
303
303 # Turn the user name into a mailmapping
304 # Turn the user name into a mailmapping
304 commit = mailmapping(name=person(author), email=email(author))
305 commit = mailmapping(name=person(author), email=email(author))
305
306
306 try:
307 try:
307 # Try and use both the commit email and name as the key
308 # Try and use both the commit email and name as the key
308 proper = mailmap[commit]
309 proper = mailmap[commit]
309
310
310 except KeyError:
311 except KeyError:
311 # If the lookup fails, use just the email as the key instead
312 # If the lookup fails, use just the email as the key instead
312 # We call this commit2 as not to erase original commit fields
313 # We call this commit2 as not to erase original commit fields
313 commit2 = mailmapping(email=commit.email)
314 commit2 = mailmapping(email=commit.email)
314 proper = mailmap.get(commit2, mailmapping(None, None))
315 proper = mailmap.get(commit2, mailmapping(None, None))
315
316
316 # Return the author field with proper values filled in
317 # Return the author field with proper values filled in
317 return '%s <%s>' % (
318 return '%s <%s>' % (
318 proper.name if proper.name else commit.name,
319 proper.name if proper.name else commit.name,
319 proper.email if proper.email else commit.email,
320 proper.email if proper.email else commit.email,
320 )
321 )
321
322
# Pattern for a well-formed author field: non-empty display name, one
# space, then an angle-bracketed address containing a single '@' and no
# nested brackets.
_correctauthorformat = remod.compile(br'^[^<]+\s\<[^<>]+@[^<>]+\>$')

def isauthorwellformed(author):
    '''Return True if the author field is well formed
    (ie "Contributor Name <contrib@email.dom>")

    >>> isauthorwellformed(b'Good Author <good@author.com>')
    True
    >>> isauthorwellformed(b'Author <good@author.com>')
    True
    >>> isauthorwellformed(b'Bad Author')
    False
    >>> isauthorwellformed(b'Bad Author <author@author.com')
    False
    >>> isauthorwellformed(b'Bad Author author@author.com')
    False
    >>> isauthorwellformed(b'<author@author.com>')
    False
    >>> isauthorwellformed(b'Bad Author <author>')
    False
    '''
    # bool() of the match object is equivalent to ``is not None``.
    return bool(_correctauthorformat.match(author))
344
345
def ellipsis(text, maxlength=400):
    """Return ``text`` trimmed to at most ``maxlength`` display columns.

    Truncated output ends with '...'. Defaults to 400 columns.
    """
    return encoding.trim(text, maxlength, ellipsis='...')
348
349
def escapestr(s):
    """Escape bytes using 'string_escape'-style encoding.

    The underlying codecs.escape_encode() is called directly because
    s.encode('string_escape') does not exist on Python 3.
    """
    escaped, _length = codecs.escape_encode(s)
    return escaped
353
354
def unescapestr(s):
    """Decode 'string_escape'-style sequences in bytes (inverse of escapestr)."""
    decoded, _length = codecs.escape_decode(s)
    return decoded
356
357
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # Non-ASCII data: round-trip through the local encoding, which
        # may be lossy.
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
365
366
def uirepr(s):
    """repr() a bytestring for UI display.

    Doubled backslashes (as produced by repr() of Windows paths) are
    collapsed back to single separators.
    """
    rendered = pycompat.byterepr(pycompat.bytestr(s))
    return rendered.replace(b'\\\\', b'\\')
369
370
370 # delay import of textwrap
371 # delay import of textwrap
def _MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr at the first character whose accumulated
            # display width exceeds space_left.
            colwidth = encoding.ucolwidth
            consumed = 0
            for pos in xrange(len(ucstr)):
                consumed += colwidth(ucstr[pos])
                if space_left < consumed:
                    return (ucstr[:pos], ucstr[pos:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, remainder = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = remainder
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:
                # cur_line collects the chunks making up the current line;
                # cur_len is their total display width.
                cur_line = []
                cur_len = 0

                # Static prefix: initial indent for the first line,
                # subsequent indent afterwards.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width available on this line.
                width = self.width - len(indent)

                # Drop a leading whitespace chunk unless this is the very
                # beginning of the text (i.e. no lines emitted yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    chunkwidth = colwidth(chunks[-1])

                    if cur_len + chunkwidth <= width:
                        # Chunk still fits on the current line.
                        cur_line.append(chunks.pop())
                        cur_len += chunkwidth
                    else:
                        # Line is full.
                        break

                # The next chunk is too wide to fit on *any* line, not
                # just this one.
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # Trailing all-whitespace chunk on this line is dropped.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Materialize the line and record it.
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    # Cache the class so subsequent calls skip the class creation.
    global _MBTextWrapper
    _MBTextWrapper = tw
    return tw(**kwargs)
473
474
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte string to ``width`` display columns and return bytes."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)

    # Decode everything once up front; the wrapper operates on unicode.
    enc = pycompat.sysstr(encoding.encoding)
    encmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(enc, encmode)
    initindent = initindent.decode(enc, encmode)
    hangindent = hangindent.decode(enc, encmode)

    wrapper = _MBTextWrapper(width=width,
                             initial_indent=initindent,
                             subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
489
490
# Accepted spellings for boolean config values.
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    Matching is case-insensitive. If s is not a valid boolean,
    returns None.
    """
    return _booleans.get(s.lower())
501
def evalpython(s):
    """Evaluate a string containing a Python expression.

    THIS FUNCTION IS NOT SAFE TO USE ON UNTRUSTED INPUT. ITS USE SHOULD BE
    LIMITED TO DEVELOPER-FACING FUNCTIONALITY.
    """
    # Expose only a minimal builtins namespace. dict and list don't need
    # constructors because literals are available.
    globs = {
        r'__builtins__': {
            r'None': None,
            r'False': False,
            r'True': True,
            r'int': int,
            r'set': set,
            r'tuple': tuple,
        },
    }

    # eval() would inherit compiler flags from this module; compile
    # explicitly with the unicode_literals flag so string literals are
    # unicode for Python 3 compatibility.
    code = compile(s, r'<string>', r'eval',
                   __future__.unicode_literals.compiler_flag, True)
    return eval(code, globs, {})
@@ -1,825 +1,838 b''
1 # wireprotoframing.py - unified framing protocol for wire protocol
1 # wireprotoframing.py - unified framing protocol for wire protocol
2 #
2 #
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
3 # Copyright 2018 Gregory Szorc <gregory.szorc@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This file contains functionality to support the unified frame-based wire
8 # This file contains functionality to support the unified frame-based wire
9 # protocol. For details about the protocol, see
9 # protocol. For details about the protocol, see
10 # `hg help internals.wireprotocol`.
10 # `hg help internals.wireprotocol`.
11
11
12 from __future__ import absolute_import
12 from __future__ import absolute_import
13
13
14 import struct
14 import struct
15
15
16 from .i18n import _
16 from .i18n import _
17 from .thirdparty import (
17 from .thirdparty import (
18 attr,
18 attr,
19 cbor,
19 )
20 )
20 from . import (
21 from . import (
21 error,
22 error,
22 util,
23 util,
23 )
24 )
24 from .utils import (
25 from .utils import (
25 stringutil,
26 stringutil,
26 )
27 )
27
28
# Wire geometry.
FRAME_HEADER_SIZE = 8
DEFAULT_MAX_FRAME_SIZE = 32768

# Stream-level flags carried in every frame header.
STREAM_FLAG_BEGIN_STREAM = 0x01
STREAM_FLAG_END_STREAM = 0x02
STREAM_FLAG_ENCODING_APPLIED = 0x04

STREAM_FLAGS = {
    b'stream-begin': STREAM_FLAG_BEGIN_STREAM,
    b'stream-end': STREAM_FLAG_END_STREAM,
    b'encoded': STREAM_FLAG_ENCODING_APPLIED,
}

# Frame type identifiers (4 bits on the wire).
FRAME_TYPE_COMMAND_NAME = 0x01
FRAME_TYPE_COMMAND_ARGUMENT = 0x02
FRAME_TYPE_COMMAND_DATA = 0x03
FRAME_TYPE_BYTES_RESPONSE = 0x04
FRAME_TYPE_ERROR_RESPONSE = 0x05
FRAME_TYPE_TEXT_OUTPUT = 0x06
FRAME_TYPE_STREAM_SETTINGS = 0x08

FRAME_TYPES = {
    b'command-name': FRAME_TYPE_COMMAND_NAME,
    b'command-argument': FRAME_TYPE_COMMAND_ARGUMENT,
    b'command-data': FRAME_TYPE_COMMAND_DATA,
    b'bytes-response': FRAME_TYPE_BYTES_RESPONSE,
    b'error-response': FRAME_TYPE_ERROR_RESPONSE,
    b'text-output': FRAME_TYPE_TEXT_OUTPUT,
    b'stream-settings': FRAME_TYPE_STREAM_SETTINGS,
}

# Per-frame-type flag bits (4 bits on the wire).
FLAG_COMMAND_NAME_EOS = 0x01
FLAG_COMMAND_NAME_HAVE_ARGS = 0x02
FLAG_COMMAND_NAME_HAVE_DATA = 0x04

FLAGS_COMMAND = {
    b'eos': FLAG_COMMAND_NAME_EOS,
    b'have-args': FLAG_COMMAND_NAME_HAVE_ARGS,
    b'have-data': FLAG_COMMAND_NAME_HAVE_DATA,
}

FLAG_COMMAND_ARGUMENT_CONTINUATION = 0x01
FLAG_COMMAND_ARGUMENT_EOA = 0x02

FLAGS_COMMAND_ARGUMENT = {
    b'continuation': FLAG_COMMAND_ARGUMENT_CONTINUATION,
    b'eoa': FLAG_COMMAND_ARGUMENT_EOA,
}

FLAG_COMMAND_DATA_CONTINUATION = 0x01
FLAG_COMMAND_DATA_EOS = 0x02

FLAGS_COMMAND_DATA = {
    b'continuation': FLAG_COMMAND_DATA_CONTINUATION,
    b'eos': FLAG_COMMAND_DATA_EOS,
}

FLAG_BYTES_RESPONSE_CONTINUATION = 0x01
FLAG_BYTES_RESPONSE_EOS = 0x02

FLAGS_BYTES_RESPONSE = {
    b'continuation': FLAG_BYTES_RESPONSE_CONTINUATION,
    b'eos': FLAG_BYTES_RESPONSE_EOS,
}

FLAG_ERROR_RESPONSE_PROTOCOL = 0x01
FLAG_ERROR_RESPONSE_APPLICATION = 0x02

FLAGS_ERROR_RESPONSE = {
    b'protocol': FLAG_ERROR_RESPONSE_PROTOCOL,
    b'application': FLAG_ERROR_RESPONSE_APPLICATION,
}

# Maps frame types to their available flags.
FRAME_TYPE_FLAGS = {
    FRAME_TYPE_COMMAND_NAME: FLAGS_COMMAND,
    FRAME_TYPE_COMMAND_ARGUMENT: FLAGS_COMMAND_ARGUMENT,
    FRAME_TYPE_COMMAND_DATA: FLAGS_COMMAND_DATA,
    FRAME_TYPE_BYTES_RESPONSE: FLAGS_BYTES_RESPONSE,
    FRAME_TYPE_ERROR_RESPONSE: FLAGS_ERROR_RESPONSE,
    FRAME_TYPE_TEXT_OUTPUT: {},
    FRAME_TYPE_STREAM_SETTINGS: {},
}

# Pre-compiled header for argument frames: two 16-bit little-endian
# lengths (key length, value length).
ARGUMENT_FRAME_HEADER = struct.Struct(r'<HH')
113
114
@attr.s(slots=True)
class frameheader(object):
    """Represents the data in a frame header."""

    # Payload byte count (24 bits on the wire).
    length = attr.ib()
    # Request this frame belongs to.
    requestid = attr.ib()
    # Identifier of the logical stream.
    streamid = attr.ib()
    # STREAM_FLAG_* bits.
    streamflags = attr.ib()
    # FRAME_TYPE_* value.
    typeid = attr.ib()
    # Frame-type-specific flag bits.
    flags = attr.ib()
124
125
@attr.s(slots=True)
class frame(object):
    """Represents a parsed frame: header fields plus raw payload bytes."""

    requestid = attr.ib()
    streamid = attr.ib()
    streamflags = attr.ib()
    typeid = attr.ib()
    flags = attr.ib()
    # Raw payload bytes following the 8-byte header.
    payload = attr.ib()
135
136
def makeframe(requestid, streamid, streamflags, typeid, flags, payload):
    """Assemble a frame into a byte array."""
    # TODO assert size of payload.
    # ``buf`` rather than ``frame`` to avoid shadowing the module-level
    # frame class.
    buf = bytearray(FRAME_HEADER_SIZE + len(payload))

    # Header layout:
    #   24 bits payload length (little endian)
    #   16 bits request id
    #    8 bits stream id
    #    8 bits stream flags
    #    4 bits type
    #    4 bits flags
    length32 = struct.pack(r'<I', len(payload))
    buf[0:3] = length32[0:3]
    struct.pack_into(r'<HBB', buf, 3, requestid, streamid, streamflags)
    buf[7] = (typeid << 4) | flags
    buf[8:] = payload

    return buf
155
156
def makeframefromhumanstring(s):
    """Create a frame from a human readable string

    DANGER: NOT SAFE TO USE WITH UNTRUSTED INPUT BECAUSE OF POTENTIAL
    eval() USAGE. DO NOT USE IN CORE.

    Strings have the form:

        <request-id> <stream-id> <stream-flags> <type> <flags> <payload>

    This can be used by user-facing applications and tests for creating
    frames easily without having to type out a bunch of constants.

    Request ID and stream IDs are integers.

    Stream flags, frame type, and flags can be specified by integer or
    named constant.

    Flags can be delimited by `|` to bitwise OR them together.

    If the payload begins with ``cbor:``, the following string will be
    evaluated as Python code and the resulting object will be fed into
    a CBOR encoder. Otherwise, the payload is interpreted as a Python
    byte string literal.
    """
    requestid, streamid, streamflags, frametype, frameflags, payload = (
        s.split(b' ', 5))

    requestid = int(requestid)
    streamid = int(streamid)

    def resolveflags(value, table):
        # OR together `|`-delimited flags, each either a named constant
        # from ``table`` or a raw integer.
        resolved = 0
        for item in value.split(b'|'):
            if item in table:
                resolved |= table[item]
            else:
                resolved |= int(item)
        return resolved

    finalstreamflags = resolveflags(streamflags, STREAM_FLAGS)

    if frametype in FRAME_TYPES:
        frametype = FRAME_TYPES[frametype]
    else:
        frametype = int(frametype)

    finalflags = resolveflags(frameflags, FRAME_TYPE_FLAGS[frametype])

    if payload.startswith(b'cbor:'):
        # Evaluate the remainder as a Python expression and encode the
        # result as canonical CBOR.
        payload = cbor.dumps(stringutil.evalpython(payload[5:]), canonical=True)
    else:
        payload = stringutil.unescapestr(payload)

    return makeframe(requestid=requestid, streamid=streamid,
                     streamflags=finalstreamflags, typeid=frametype,
                     flags=finalflags, payload=payload)
204
217
def parseheader(data):
    """Parse a unified framing protocol frame header from a buffer.

    The header is expected to be in the buffer at offset 0 and the
    buffer is expected to be large enough to hold a full header.

    Returns a ``frameheader`` instance.
    """
    # 24 bits payload length (little endian)
    # 16 bits request ID
    # 8 bits stream ID
    # 8 bits stream flags
    # 4 bits frame type
    # 4 bits frame flags
    # ... payload
    #
    # The length is a 24-bit little-endian integer, so byte 2 carries a
    # weight of 2**16 (65536). The previous multiplier of 16384 (2**14)
    # disagreed with makeframe(), which packs the length via struct
    # '<I', and corrupted the decoded length of any frame whose payload
    # exceeds 65535 bytes.
    framelength = data[0] + 256 * data[1] + 65536 * data[2]
    requestid, streamid, streamflags = struct.unpack_from(r'<HBB', data, 3)
    typeflags = data[7]

    frametype = (typeflags & 0xf0) >> 4
    frameflags = typeflags & 0x0f

    return frameheader(framelength, requestid, streamid, streamflags,
                       frametype, frameflags)
227
240
def readframe(fh):
    """Read a unified framing protocol frame from a file object.

    Returns a ``frame`` instance (request ID, stream ID, stream flags,
    type, flags, payload) for the decoded frame, or None if no frame is
    available. May raise if a malformed frame is seen.

    (The previous docstring claimed a 3-tuple was returned; the code has
    returned a 6-field frame object.)
    """
    header = bytearray(FRAME_HEADER_SIZE)

    readcount = fh.readinto(header)

    # Clean EOF before any header byte was read.
    if readcount == 0:
        return None

    if readcount != FRAME_HEADER_SIZE:
        raise error.Abort(_('received incomplete frame: got %d bytes: %s') %
                          (readcount, header))

    h = parseheader(header)

    payload = fh.read(h.length)
    if len(payload) != h.length:
        raise error.Abort(_('frame length error: expected %d; got %d') %
                          (h.length, len(payload)))

    return frame(h.requestid, h.streamid, h.streamflags, h.typeid, h.flags,
                 payload)
255
268
def createcommandframes(stream, requestid, cmd, args, datafh=None):
    """Create frames necessary to transmit a request to run a command.

    This is a generator of bytearrays. Each item represents a frame
    ready to be sent over the wire to a peer.
    """
    flags = 0
    if args:
        flags |= FLAG_COMMAND_NAME_HAVE_ARGS
    if datafh:
        flags |= FLAG_COMMAND_NAME_HAVE_DATA

    # With neither arguments nor data, the name frame ends the request.
    if not flags:
        flags |= FLAG_COMMAND_NAME_EOS

    yield stream.makeframe(requestid=requestid, typeid=FRAME_TYPE_COMMAND_NAME,
                           flags=flags, payload=cmd)

    # One frame per argument, in sorted key order for determinism.
    for idx, key in enumerate(sorted(args)):
        value = args[key]
        islast = idx == len(args) - 1

        # TODO handle splitting of argument values across frames.
        payload = bytearray(ARGUMENT_FRAME_HEADER.size + len(key) + len(value))
        ARGUMENT_FRAME_HEADER.pack_into(payload, 0, len(key), len(value))
        pos = ARGUMENT_FRAME_HEADER.size
        payload[pos:pos + len(key)] = key
        pos += len(key)
        payload[pos:pos + len(value)] = value

        yield stream.makeframe(requestid=requestid,
                               typeid=FRAME_TYPE_COMMAND_ARGUMENT,
                               flags=FLAG_COMMAND_ARGUMENT_EOA if islast else 0,
                               payload=payload)

    if datafh:
        while True:
            data = datafh.read(DEFAULT_MAX_FRAME_SIZE)

            done = False
            if len(data) == DEFAULT_MAX_FRAME_SIZE:
                flags = FLAG_COMMAND_DATA_CONTINUATION
            else:
                # Short read means EOF; verify nothing remains.
                flags = FLAG_COMMAND_DATA_EOS
                assert datafh.read(1) == b''
                done = True

            yield stream.makeframe(requestid=requestid,
                                   typeid=FRAME_TYPE_COMMAND_DATA,
                                   flags=flags,
                                   payload=data)

            if done:
                break
312
325
def createbytesresponseframesfrombytes(stream, requestid, data,
                                       maxframesize=DEFAULT_MAX_FRAME_SIZE):
    """Create a raw frame to send a bytes response from static bytes input.

    Returns a generator of bytearrays.
    """
    # Fast path: the whole response fits in a single frame.
    if len(data) <= maxframesize:
        yield stream.makeframe(requestid=requestid,
                               typeid=FRAME_TYPE_BYTES_RESPONSE,
                               flags=FLAG_BYTES_RESPONSE_EOS,
                               payload=data)
        return

    offset = 0
    while True:
        chunk = data[offset:offset + maxframesize]
        offset += len(chunk)
        done = offset == len(data)

        if done:
            flags = FLAG_BYTES_RESPONSE_EOS
        else:
            flags = FLAG_BYTES_RESPONSE_CONTINUATION

        yield stream.makeframe(requestid=requestid,
                               typeid=FRAME_TYPE_BYTES_RESPONSE,
                               flags=flags,
                               payload=chunk)

        if done:
            break
346
359
347 def createerrorframe(stream, requestid, msg, protocol=False, application=False):
360 def createerrorframe(stream, requestid, msg, protocol=False, application=False):
348 # TODO properly handle frame size limits.
361 # TODO properly handle frame size limits.
349 assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
362 assert len(msg) <= DEFAULT_MAX_FRAME_SIZE
350
363
351 flags = 0
364 flags = 0
352 if protocol:
365 if protocol:
353 flags |= FLAG_ERROR_RESPONSE_PROTOCOL
366 flags |= FLAG_ERROR_RESPONSE_PROTOCOL
354 if application:
367 if application:
355 flags |= FLAG_ERROR_RESPONSE_APPLICATION
368 flags |= FLAG_ERROR_RESPONSE_APPLICATION
356
369
357 yield stream.makeframe(requestid=requestid,
370 yield stream.makeframe(requestid=requestid,
358 typeid=FRAME_TYPE_ERROR_RESPONSE,
371 typeid=FRAME_TYPE_ERROR_RESPONSE,
359 flags=flags,
372 flags=flags,
360 payload=msg)
373 payload=msg)
361
374
362 def createtextoutputframe(stream, requestid, atoms):
375 def createtextoutputframe(stream, requestid, atoms):
363 """Create a text output frame to render text to people.
376 """Create a text output frame to render text to people.
364
377
365 ``atoms`` is a 3-tuple of (formatting string, args, labels).
378 ``atoms`` is a 3-tuple of (formatting string, args, labels).
366
379
367 The formatting string contains ``%s`` tokens to be replaced by the
380 The formatting string contains ``%s`` tokens to be replaced by the
368 corresponding indexed entry in ``args``. ``labels`` is an iterable of
381 corresponding indexed entry in ``args``. ``labels`` is an iterable of
369 formatters to be applied at rendering time. In terms of the ``ui``
382 formatters to be applied at rendering time. In terms of the ``ui``
370 class, each atom corresponds to a ``ui.write()``.
383 class, each atom corresponds to a ``ui.write()``.
371 """
384 """
372 bytesleft = DEFAULT_MAX_FRAME_SIZE
385 bytesleft = DEFAULT_MAX_FRAME_SIZE
373 atomchunks = []
386 atomchunks = []
374
387
375 for (formatting, args, labels) in atoms:
388 for (formatting, args, labels) in atoms:
376 if len(args) > 255:
389 if len(args) > 255:
377 raise ValueError('cannot use more than 255 formatting arguments')
390 raise ValueError('cannot use more than 255 formatting arguments')
378 if len(labels) > 255:
391 if len(labels) > 255:
379 raise ValueError('cannot use more than 255 labels')
392 raise ValueError('cannot use more than 255 labels')
380
393
381 # TODO look for localstr, other types here?
394 # TODO look for localstr, other types here?
382
395
383 if not isinstance(formatting, bytes):
396 if not isinstance(formatting, bytes):
384 raise ValueError('must use bytes formatting strings')
397 raise ValueError('must use bytes formatting strings')
385 for arg in args:
398 for arg in args:
386 if not isinstance(arg, bytes):
399 if not isinstance(arg, bytes):
387 raise ValueError('must use bytes for arguments')
400 raise ValueError('must use bytes for arguments')
388 for label in labels:
401 for label in labels:
389 if not isinstance(label, bytes):
402 if not isinstance(label, bytes):
390 raise ValueError('must use bytes for labels')
403 raise ValueError('must use bytes for labels')
391
404
392 # Formatting string must be UTF-8.
405 # Formatting string must be UTF-8.
393 formatting = formatting.decode(r'utf-8', r'replace').encode(r'utf-8')
406 formatting = formatting.decode(r'utf-8', r'replace').encode(r'utf-8')
394
407
395 # Arguments must be UTF-8.
408 # Arguments must be UTF-8.
396 args = [a.decode(r'utf-8', r'replace').encode(r'utf-8') for a in args]
409 args = [a.decode(r'utf-8', r'replace').encode(r'utf-8') for a in args]
397
410
398 # Labels must be ASCII.
411 # Labels must be ASCII.
399 labels = [l.decode(r'ascii', r'strict').encode(r'ascii')
412 labels = [l.decode(r'ascii', r'strict').encode(r'ascii')
400 for l in labels]
413 for l in labels]
401
414
402 if len(formatting) > 65535:
415 if len(formatting) > 65535:
403 raise ValueError('formatting string cannot be longer than 64k')
416 raise ValueError('formatting string cannot be longer than 64k')
404
417
405 if any(len(a) > 65535 for a in args):
418 if any(len(a) > 65535 for a in args):
406 raise ValueError('argument string cannot be longer than 64k')
419 raise ValueError('argument string cannot be longer than 64k')
407
420
408 if any(len(l) > 255 for l in labels):
421 if any(len(l) > 255 for l in labels):
409 raise ValueError('label string cannot be longer than 255 bytes')
422 raise ValueError('label string cannot be longer than 255 bytes')
410
423
411 chunks = [
424 chunks = [
412 struct.pack(r'<H', len(formatting)),
425 struct.pack(r'<H', len(formatting)),
413 struct.pack(r'<BB', len(labels), len(args)),
426 struct.pack(r'<BB', len(labels), len(args)),
414 struct.pack(r'<' + r'B' * len(labels), *map(len, labels)),
427 struct.pack(r'<' + r'B' * len(labels), *map(len, labels)),
415 struct.pack(r'<' + r'H' * len(args), *map(len, args)),
428 struct.pack(r'<' + r'H' * len(args), *map(len, args)),
416 ]
429 ]
417 chunks.append(formatting)
430 chunks.append(formatting)
418 chunks.extend(labels)
431 chunks.extend(labels)
419 chunks.extend(args)
432 chunks.extend(args)
420
433
421 atom = b''.join(chunks)
434 atom = b''.join(chunks)
422 atomchunks.append(atom)
435 atomchunks.append(atom)
423 bytesleft -= len(atom)
436 bytesleft -= len(atom)
424
437
425 if bytesleft < 0:
438 if bytesleft < 0:
426 raise ValueError('cannot encode data in a single frame')
439 raise ValueError('cannot encode data in a single frame')
427
440
428 yield stream.makeframe(requestid=requestid,
441 yield stream.makeframe(requestid=requestid,
429 typeid=FRAME_TYPE_TEXT_OUTPUT,
442 typeid=FRAME_TYPE_TEXT_OUTPUT,
430 flags=0,
443 flags=0,
431 payload=b''.join(atomchunks))
444 payload=b''.join(atomchunks))
432
445
433 class stream(object):
446 class stream(object):
434 """Represents a logical unidirectional series of frames."""
447 """Represents a logical unidirectional series of frames."""
435
448
436 def __init__(self, streamid, active=False):
449 def __init__(self, streamid, active=False):
437 self.streamid = streamid
450 self.streamid = streamid
438 self._active = False
451 self._active = False
439
452
440 def makeframe(self, requestid, typeid, flags, payload):
453 def makeframe(self, requestid, typeid, flags, payload):
441 """Create a frame to be sent out over this stream.
454 """Create a frame to be sent out over this stream.
442
455
443 Only returns the frame instance. Does not actually send it.
456 Only returns the frame instance. Does not actually send it.
444 """
457 """
445 streamflags = 0
458 streamflags = 0
446 if not self._active:
459 if not self._active:
447 streamflags |= STREAM_FLAG_BEGIN_STREAM
460 streamflags |= STREAM_FLAG_BEGIN_STREAM
448 self._active = True
461 self._active = True
449
462
450 return makeframe(requestid, self.streamid, streamflags, typeid, flags,
463 return makeframe(requestid, self.streamid, streamflags, typeid, flags,
451 payload)
464 payload)
452
465
453 def ensureserverstream(stream):
466 def ensureserverstream(stream):
454 if stream.streamid % 2:
467 if stream.streamid % 2:
455 raise error.ProgrammingError('server should only write to even '
468 raise error.ProgrammingError('server should only write to even '
456 'numbered streams; %d is not even' %
469 'numbered streams; %d is not even' %
457 stream.streamid)
470 stream.streamid)
458
471
459 class serverreactor(object):
472 class serverreactor(object):
460 """Holds state of a server handling frame-based protocol requests.
473 """Holds state of a server handling frame-based protocol requests.
461
474
462 This class is the "brain" of the unified frame-based protocol server
475 This class is the "brain" of the unified frame-based protocol server
463 component. While the protocol is stateless from the perspective of
476 component. While the protocol is stateless from the perspective of
464 requests/commands, something needs to track which frames have been
477 requests/commands, something needs to track which frames have been
465 received, what frames to expect, etc. This class is that thing.
478 received, what frames to expect, etc. This class is that thing.
466
479
467 Instances are modeled as a state machine of sorts. Instances are also
480 Instances are modeled as a state machine of sorts. Instances are also
468 reactionary to external events. The point of this class is to encapsulate
481 reactionary to external events. The point of this class is to encapsulate
469 the state of the connection and the exchange of frames, not to perform
482 the state of the connection and the exchange of frames, not to perform
470 work. Instead, callers tell this class when something occurs, like a
483 work. Instead, callers tell this class when something occurs, like a
471 frame arriving. If that activity is worthy of a follow-up action (say
484 frame arriving. If that activity is worthy of a follow-up action (say
472 *run a command*), the return value of that handler will say so.
485 *run a command*), the return value of that handler will say so.
473
486
474 I/O and CPU intensive operations are purposefully delegated outside of
487 I/O and CPU intensive operations are purposefully delegated outside of
475 this class.
488 this class.
476
489
477 Consumers are expected to tell instances when events occur. They do so by
490 Consumers are expected to tell instances when events occur. They do so by
478 calling the various ``on*`` methods. These methods return a 2-tuple
491 calling the various ``on*`` methods. These methods return a 2-tuple
479 describing any follow-up action(s) to take. The first element is the
492 describing any follow-up action(s) to take. The first element is the
480 name of an action to perform. The second is a data structure (usually
493 name of an action to perform. The second is a data structure (usually
481 a dict) specific to that action that contains more information. e.g.
494 a dict) specific to that action that contains more information. e.g.
482 if the server wants to send frames back to the client, the data structure
495 if the server wants to send frames back to the client, the data structure
483 will contain a reference to those frames.
496 will contain a reference to those frames.
484
497
485 Valid actions that consumers can be instructed to take are:
498 Valid actions that consumers can be instructed to take are:
486
499
487 sendframes
500 sendframes
488 Indicates that frames should be sent to the client. The ``framegen``
501 Indicates that frames should be sent to the client. The ``framegen``
489 key contains a generator of frames that should be sent. The server
502 key contains a generator of frames that should be sent. The server
490 assumes that all frames are sent to the client.
503 assumes that all frames are sent to the client.
491
504
492 error
505 error
493 Indicates that an error occurred. Consumer should probably abort.
506 Indicates that an error occurred. Consumer should probably abort.
494
507
495 runcommand
508 runcommand
496 Indicates that the consumer should run a wire protocol command. Details
509 Indicates that the consumer should run a wire protocol command. Details
497 of the command to run are given in the data structure.
510 of the command to run are given in the data structure.
498
511
499 wantframe
512 wantframe
500 Indicates that nothing of interest happened and the server is waiting on
513 Indicates that nothing of interest happened and the server is waiting on
501 more frames from the client before anything interesting can be done.
514 more frames from the client before anything interesting can be done.
502
515
503 noop
516 noop
504 Indicates no additional action is required.
517 Indicates no additional action is required.
505
518
506 Known Issues
519 Known Issues
507 ------------
520 ------------
508
521
509 There are no limits to the number of partially received commands or their
522 There are no limits to the number of partially received commands or their
510 size. A malicious client could stream command request data and exhaust the
523 size. A malicious client could stream command request data and exhaust the
511 server's memory.
524 server's memory.
512
525
513 Partially received commands are not acted upon when end of input is
526 Partially received commands are not acted upon when end of input is
514 reached. Should the server error if it receives a partial request?
527 reached. Should the server error if it receives a partial request?
515 Should the client send a message to abort a partially transmitted request
528 Should the client send a message to abort a partially transmitted request
516 to facilitate graceful shutdown?
529 to facilitate graceful shutdown?
517
530
518 Active requests that haven't been responded to aren't tracked. This means
531 Active requests that haven't been responded to aren't tracked. This means
519 that if we receive a command and instruct its dispatch, another command
532 that if we receive a command and instruct its dispatch, another command
520 with its request ID can come in over the wire and there will be a race
533 with its request ID can come in over the wire and there will be a race
521 between who responds to what.
534 between who responds to what.
522 """
535 """
523
536
524 def __init__(self, deferoutput=False):
537 def __init__(self, deferoutput=False):
525 """Construct a new server reactor.
538 """Construct a new server reactor.
526
539
527 ``deferoutput`` can be used to indicate that no output frames should be
540 ``deferoutput`` can be used to indicate that no output frames should be
528 instructed to be sent until input has been exhausted. In this mode,
541 instructed to be sent until input has been exhausted. In this mode,
529 events that would normally generate output frames (such as a command
542 events that would normally generate output frames (such as a command
530 response being ready) will instead defer instructing the consumer to
543 response being ready) will instead defer instructing the consumer to
531 send those frames. This is useful for half-duplex transports where the
544 send those frames. This is useful for half-duplex transports where the
532 sender cannot receive until all data has been transmitted.
545 sender cannot receive until all data has been transmitted.
533 """
546 """
534 self._deferoutput = deferoutput
547 self._deferoutput = deferoutput
535 self._state = 'idle'
548 self._state = 'idle'
536 self._nextoutgoingstreamid = 2
549 self._nextoutgoingstreamid = 2
537 self._bufferedframegens = []
550 self._bufferedframegens = []
538 # stream id -> stream instance for all active streams from the client.
551 # stream id -> stream instance for all active streams from the client.
539 self._incomingstreams = {}
552 self._incomingstreams = {}
540 self._outgoingstreams = {}
553 self._outgoingstreams = {}
541 # request id -> dict of commands that are actively being received.
554 # request id -> dict of commands that are actively being received.
542 self._receivingcommands = {}
555 self._receivingcommands = {}
543 # Request IDs that have been received and are actively being processed.
556 # Request IDs that have been received and are actively being processed.
544 # Once all output for a request has been sent, it is removed from this
557 # Once all output for a request has been sent, it is removed from this
545 # set.
558 # set.
546 self._activecommands = set()
559 self._activecommands = set()
547
560
548 def onframerecv(self, frame):
561 def onframerecv(self, frame):
549 """Process a frame that has been received off the wire.
562 """Process a frame that has been received off the wire.
550
563
551 Returns a dict with an ``action`` key that details what action,
564 Returns a dict with an ``action`` key that details what action,
552 if any, the consumer should take next.
565 if any, the consumer should take next.
553 """
566 """
554 if not frame.streamid % 2:
567 if not frame.streamid % 2:
555 self._state = 'errored'
568 self._state = 'errored'
556 return self._makeerrorresult(
569 return self._makeerrorresult(
557 _('received frame with even numbered stream ID: %d') %
570 _('received frame with even numbered stream ID: %d') %
558 frame.streamid)
571 frame.streamid)
559
572
560 if frame.streamid not in self._incomingstreams:
573 if frame.streamid not in self._incomingstreams:
561 if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
574 if not frame.streamflags & STREAM_FLAG_BEGIN_STREAM:
562 self._state = 'errored'
575 self._state = 'errored'
563 return self._makeerrorresult(
576 return self._makeerrorresult(
564 _('received frame on unknown inactive stream without '
577 _('received frame on unknown inactive stream without '
565 'beginning of stream flag set'))
578 'beginning of stream flag set'))
566
579
567 self._incomingstreams[frame.streamid] = stream(frame.streamid)
580 self._incomingstreams[frame.streamid] = stream(frame.streamid)
568
581
569 if frame.streamflags & STREAM_FLAG_ENCODING_APPLIED:
582 if frame.streamflags & STREAM_FLAG_ENCODING_APPLIED:
570 # TODO handle decoding frames
583 # TODO handle decoding frames
571 self._state = 'errored'
584 self._state = 'errored'
572 raise error.ProgrammingError('support for decoding stream payloads '
585 raise error.ProgrammingError('support for decoding stream payloads '
573 'not yet implemented')
586 'not yet implemented')
574
587
575 if frame.streamflags & STREAM_FLAG_END_STREAM:
588 if frame.streamflags & STREAM_FLAG_END_STREAM:
576 del self._incomingstreams[frame.streamid]
589 del self._incomingstreams[frame.streamid]
577
590
578 handlers = {
591 handlers = {
579 'idle': self._onframeidle,
592 'idle': self._onframeidle,
580 'command-receiving': self._onframecommandreceiving,
593 'command-receiving': self._onframecommandreceiving,
581 'errored': self._onframeerrored,
594 'errored': self._onframeerrored,
582 }
595 }
583
596
584 meth = handlers.get(self._state)
597 meth = handlers.get(self._state)
585 if not meth:
598 if not meth:
586 raise error.ProgrammingError('unhandled state: %s' % self._state)
599 raise error.ProgrammingError('unhandled state: %s' % self._state)
587
600
588 return meth(frame)
601 return meth(frame)
589
602
590 def onbytesresponseready(self, stream, requestid, data):
603 def onbytesresponseready(self, stream, requestid, data):
591 """Signal that a bytes response is ready to be sent to the client.
604 """Signal that a bytes response is ready to be sent to the client.
592
605
593 The raw bytes response is passed as an argument.
606 The raw bytes response is passed as an argument.
594 """
607 """
595 ensureserverstream(stream)
608 ensureserverstream(stream)
596
609
597 def sendframes():
610 def sendframes():
598 for frame in createbytesresponseframesfrombytes(stream, requestid,
611 for frame in createbytesresponseframesfrombytes(stream, requestid,
599 data):
612 data):
600 yield frame
613 yield frame
601
614
602 self._activecommands.remove(requestid)
615 self._activecommands.remove(requestid)
603
616
604 result = sendframes()
617 result = sendframes()
605
618
606 if self._deferoutput:
619 if self._deferoutput:
607 self._bufferedframegens.append(result)
620 self._bufferedframegens.append(result)
608 return 'noop', {}
621 return 'noop', {}
609 else:
622 else:
610 return 'sendframes', {
623 return 'sendframes', {
611 'framegen': result,
624 'framegen': result,
612 }
625 }
613
626
614 def oninputeof(self):
627 def oninputeof(self):
615 """Signals that end of input has been received.
628 """Signals that end of input has been received.
616
629
617 No more frames will be received. All pending activity should be
630 No more frames will be received. All pending activity should be
618 completed.
631 completed.
619 """
632 """
620 # TODO should we do anything about in-flight commands?
633 # TODO should we do anything about in-flight commands?
621
634
622 if not self._deferoutput or not self._bufferedframegens:
635 if not self._deferoutput or not self._bufferedframegens:
623 return 'noop', {}
636 return 'noop', {}
624
637
625 # If we buffered all our responses, emit those.
638 # If we buffered all our responses, emit those.
626 def makegen():
639 def makegen():
627 for gen in self._bufferedframegens:
640 for gen in self._bufferedframegens:
628 for frame in gen:
641 for frame in gen:
629 yield frame
642 yield frame
630
643
631 return 'sendframes', {
644 return 'sendframes', {
632 'framegen': makegen(),
645 'framegen': makegen(),
633 }
646 }
634
647
635 def onapplicationerror(self, stream, requestid, msg):
648 def onapplicationerror(self, stream, requestid, msg):
636 ensureserverstream(stream)
649 ensureserverstream(stream)
637
650
638 return 'sendframes', {
651 return 'sendframes', {
639 'framegen': createerrorframe(stream, requestid, msg,
652 'framegen': createerrorframe(stream, requestid, msg,
640 application=True),
653 application=True),
641 }
654 }
642
655
643 def makeoutputstream(self):
656 def makeoutputstream(self):
644 """Create a stream to be used for sending data to the client."""
657 """Create a stream to be used for sending data to the client."""
645 streamid = self._nextoutgoingstreamid
658 streamid = self._nextoutgoingstreamid
646 self._nextoutgoingstreamid += 2
659 self._nextoutgoingstreamid += 2
647
660
648 s = stream(streamid)
661 s = stream(streamid)
649 self._outgoingstreams[streamid] = s
662 self._outgoingstreams[streamid] = s
650
663
651 return s
664 return s
652
665
653 def _makeerrorresult(self, msg):
666 def _makeerrorresult(self, msg):
654 return 'error', {
667 return 'error', {
655 'message': msg,
668 'message': msg,
656 }
669 }
657
670
658 def _makeruncommandresult(self, requestid):
671 def _makeruncommandresult(self, requestid):
659 entry = self._receivingcommands[requestid]
672 entry = self._receivingcommands[requestid]
660 del self._receivingcommands[requestid]
673 del self._receivingcommands[requestid]
661
674
662 if self._receivingcommands:
675 if self._receivingcommands:
663 self._state = 'command-receiving'
676 self._state = 'command-receiving'
664 else:
677 else:
665 self._state = 'idle'
678 self._state = 'idle'
666
679
667 assert requestid not in self._activecommands
680 assert requestid not in self._activecommands
668 self._activecommands.add(requestid)
681 self._activecommands.add(requestid)
669
682
670 return 'runcommand', {
683 return 'runcommand', {
671 'requestid': requestid,
684 'requestid': requestid,
672 'command': entry['command'],
685 'command': entry['command'],
673 'args': entry['args'],
686 'args': entry['args'],
674 'data': entry['data'].getvalue() if entry['data'] else None,
687 'data': entry['data'].getvalue() if entry['data'] else None,
675 }
688 }
676
689
677 def _makewantframeresult(self):
690 def _makewantframeresult(self):
678 return 'wantframe', {
691 return 'wantframe', {
679 'state': self._state,
692 'state': self._state,
680 }
693 }
681
694
682 def _onframeidle(self, frame):
695 def _onframeidle(self, frame):
683 # The only frame type that should be received in this state is a
696 # The only frame type that should be received in this state is a
684 # command request.
697 # command request.
685 if frame.typeid != FRAME_TYPE_COMMAND_NAME:
698 if frame.typeid != FRAME_TYPE_COMMAND_NAME:
686 self._state = 'errored'
699 self._state = 'errored'
687 return self._makeerrorresult(
700 return self._makeerrorresult(
688 _('expected command frame; got %d') % frame.typeid)
701 _('expected command frame; got %d') % frame.typeid)
689
702
690 if frame.requestid in self._receivingcommands:
703 if frame.requestid in self._receivingcommands:
691 self._state = 'errored'
704 self._state = 'errored'
692 return self._makeerrorresult(
705 return self._makeerrorresult(
693 _('request with ID %d already received') % frame.requestid)
706 _('request with ID %d already received') % frame.requestid)
694
707
695 if frame.requestid in self._activecommands:
708 if frame.requestid in self._activecommands:
696 self._state = 'errored'
709 self._state = 'errored'
697 return self._makeerrorresult((
710 return self._makeerrorresult((
698 _('request with ID %d is already active') % frame.requestid))
711 _('request with ID %d is already active') % frame.requestid))
699
712
700 expectingargs = bool(frame.flags & FLAG_COMMAND_NAME_HAVE_ARGS)
713 expectingargs = bool(frame.flags & FLAG_COMMAND_NAME_HAVE_ARGS)
701 expectingdata = bool(frame.flags & FLAG_COMMAND_NAME_HAVE_DATA)
714 expectingdata = bool(frame.flags & FLAG_COMMAND_NAME_HAVE_DATA)
702
715
703 self._receivingcommands[frame.requestid] = {
716 self._receivingcommands[frame.requestid] = {
704 'command': frame.payload,
717 'command': frame.payload,
705 'args': {},
718 'args': {},
706 'data': None,
719 'data': None,
707 'expectingargs': expectingargs,
720 'expectingargs': expectingargs,
708 'expectingdata': expectingdata,
721 'expectingdata': expectingdata,
709 }
722 }
710
723
711 if frame.flags & FLAG_COMMAND_NAME_EOS:
724 if frame.flags & FLAG_COMMAND_NAME_EOS:
712 return self._makeruncommandresult(frame.requestid)
725 return self._makeruncommandresult(frame.requestid)
713
726
714 if expectingargs or expectingdata:
727 if expectingargs or expectingdata:
715 self._state = 'command-receiving'
728 self._state = 'command-receiving'
716 return self._makewantframeresult()
729 return self._makewantframeresult()
717 else:
730 else:
718 self._state = 'errored'
731 self._state = 'errored'
719 return self._makeerrorresult(_('missing frame flags on '
732 return self._makeerrorresult(_('missing frame flags on '
720 'command frame'))
733 'command frame'))
721
734
722 def _onframecommandreceiving(self, frame):
735 def _onframecommandreceiving(self, frame):
723 # It could be a new command request. Process it as such.
736 # It could be a new command request. Process it as such.
724 if frame.typeid == FRAME_TYPE_COMMAND_NAME:
737 if frame.typeid == FRAME_TYPE_COMMAND_NAME:
725 return self._onframeidle(frame)
738 return self._onframeidle(frame)
726
739
727 # All other frames should be related to a command that is currently
740 # All other frames should be related to a command that is currently
728 # receiving but is not active.
741 # receiving but is not active.
729 if frame.requestid in self._activecommands:
742 if frame.requestid in self._activecommands:
730 self._state = 'errored'
743 self._state = 'errored'
731 return self._makeerrorresult(
744 return self._makeerrorresult(
732 _('received frame for request that is still active: %d') %
745 _('received frame for request that is still active: %d') %
733 frame.requestid)
746 frame.requestid)
734
747
735 if frame.requestid not in self._receivingcommands:
748 if frame.requestid not in self._receivingcommands:
736 self._state = 'errored'
749 self._state = 'errored'
737 return self._makeerrorresult(
750 return self._makeerrorresult(
738 _('received frame for request that is not receiving: %d') %
751 _('received frame for request that is not receiving: %d') %
739 frame.requestid)
752 frame.requestid)
740
753
741 entry = self._receivingcommands[frame.requestid]
754 entry = self._receivingcommands[frame.requestid]
742
755
743 if frame.typeid == FRAME_TYPE_COMMAND_ARGUMENT:
756 if frame.typeid == FRAME_TYPE_COMMAND_ARGUMENT:
744 if not entry['expectingargs']:
757 if not entry['expectingargs']:
745 self._state = 'errored'
758 self._state = 'errored'
746 return self._makeerrorresult(_(
759 return self._makeerrorresult(_(
747 'received command argument frame for request that is not '
760 'received command argument frame for request that is not '
748 'expecting arguments: %d') % frame.requestid)
761 'expecting arguments: %d') % frame.requestid)
749
762
750 return self._handlecommandargsframe(frame, entry)
763 return self._handlecommandargsframe(frame, entry)
751
764
752 elif frame.typeid == FRAME_TYPE_COMMAND_DATA:
765 elif frame.typeid == FRAME_TYPE_COMMAND_DATA:
753 if not entry['expectingdata']:
766 if not entry['expectingdata']:
754 self._state = 'errored'
767 self._state = 'errored'
755 return self._makeerrorresult(_(
768 return self._makeerrorresult(_(
756 'received command data frame for request that is not '
769 'received command data frame for request that is not '
757 'expecting data: %d') % frame.requestid)
770 'expecting data: %d') % frame.requestid)
758
771
759 if entry['data'] is None:
772 if entry['data'] is None:
760 entry['data'] = util.bytesio()
773 entry['data'] = util.bytesio()
761
774
762 return self._handlecommanddataframe(frame, entry)
775 return self._handlecommanddataframe(frame, entry)
763
776
764 def _handlecommandargsframe(self, frame, entry):
777 def _handlecommandargsframe(self, frame, entry):
765 # The frame and state of command should have already been validated.
778 # The frame and state of command should have already been validated.
766 assert frame.typeid == FRAME_TYPE_COMMAND_ARGUMENT
779 assert frame.typeid == FRAME_TYPE_COMMAND_ARGUMENT
767
780
768 offset = 0
781 offset = 0
769 namesize, valuesize = ARGUMENT_FRAME_HEADER.unpack_from(frame.payload)
782 namesize, valuesize = ARGUMENT_FRAME_HEADER.unpack_from(frame.payload)
770 offset += ARGUMENT_FRAME_HEADER.size
783 offset += ARGUMENT_FRAME_HEADER.size
771
784
772 # The argument name MUST fit inside the frame.
785 # The argument name MUST fit inside the frame.
773 argname = bytes(frame.payload[offset:offset + namesize])
786 argname = bytes(frame.payload[offset:offset + namesize])
774 offset += namesize
787 offset += namesize
775
788
776 if len(argname) != namesize:
789 if len(argname) != namesize:
777 self._state = 'errored'
790 self._state = 'errored'
778 return self._makeerrorresult(_('malformed argument frame: '
791 return self._makeerrorresult(_('malformed argument frame: '
779 'partial argument name'))
792 'partial argument name'))
780
793
781 argvalue = bytes(frame.payload[offset:])
794 argvalue = bytes(frame.payload[offset:])
782
795
783 # Argument value spans multiple frames. Record our active state
796 # Argument value spans multiple frames. Record our active state
784 # and wait for the next frame.
797 # and wait for the next frame.
785 if frame.flags & FLAG_COMMAND_ARGUMENT_CONTINUATION:
798 if frame.flags & FLAG_COMMAND_ARGUMENT_CONTINUATION:
786 raise error.ProgrammingError('not yet implemented')
799 raise error.ProgrammingError('not yet implemented')
787
800
788 # Common case: the argument value is completely contained in this
801 # Common case: the argument value is completely contained in this
789 # frame.
802 # frame.
790
803
791 if len(argvalue) != valuesize:
804 if len(argvalue) != valuesize:
792 self._state = 'errored'
805 self._state = 'errored'
793 return self._makeerrorresult(_('malformed argument frame: '
806 return self._makeerrorresult(_('malformed argument frame: '
794 'partial argument value'))
807 'partial argument value'))
795
808
796 entry['args'][argname] = argvalue
809 entry['args'][argname] = argvalue
797
810
798 if frame.flags & FLAG_COMMAND_ARGUMENT_EOA:
811 if frame.flags & FLAG_COMMAND_ARGUMENT_EOA:
799 if entry['expectingdata']:
812 if entry['expectingdata']:
800 # TODO signal request to run a command once we don't
813 # TODO signal request to run a command once we don't
801 # buffer data frames.
814 # buffer data frames.
802 return self._makewantframeresult()
815 return self._makewantframeresult()
803 else:
816 else:
804 return self._makeruncommandresult(frame.requestid)
817 return self._makeruncommandresult(frame.requestid)
805 else:
818 else:
806 return self._makewantframeresult()
819 return self._makewantframeresult()
807
820
808 def _handlecommanddataframe(self, frame, entry):
821 def _handlecommanddataframe(self, frame, entry):
809 assert frame.typeid == FRAME_TYPE_COMMAND_DATA
822 assert frame.typeid == FRAME_TYPE_COMMAND_DATA
810
823
811 # TODO support streaming data instead of buffering it.
824 # TODO support streaming data instead of buffering it.
812 entry['data'].write(frame.payload)
825 entry['data'].write(frame.payload)
813
826
814 if frame.flags & FLAG_COMMAND_DATA_CONTINUATION:
827 if frame.flags & FLAG_COMMAND_DATA_CONTINUATION:
815 return self._makewantframeresult()
828 return self._makewantframeresult()
816 elif frame.flags & FLAG_COMMAND_DATA_EOS:
829 elif frame.flags & FLAG_COMMAND_DATA_EOS:
817 entry['data'].seek(0)
830 entry['data'].seek(0)
818 return self._makeruncommandresult(frame.requestid)
831 return self._makeruncommandresult(frame.requestid)
819 else:
832 else:
820 self._state = 'errored'
833 self._state = 'errored'
821 return self._makeerrorresult(_('command data frame without '
834 return self._makeerrorresult(_('command data frame without '
822 'flags'))
835 'flags'))
823
836
824 def _onframeerrored(self, frame):
837 def _onframeerrored(self, frame):
825 return self._makeerrorresult(_('server already errored'))
838 return self._makeerrorresult(_('server already errored'))
@@ -1,684 +1,737 b''
1 from __future__ import absolute_import, print_function
1 from __future__ import absolute_import, print_function
2
2
3 import unittest
3 import unittest
4
4
5 from mercurial import (
5 from mercurial import (
6 util,
6 util,
7 wireprotoframing as framing,
7 wireprotoframing as framing,
8 )
8 )
9
9
10 ffs = framing.makeframefromhumanstring
10 ffs = framing.makeframefromhumanstring
11
11
12 def makereactor(deferoutput=False):
12 def makereactor(deferoutput=False):
13 return framing.serverreactor(deferoutput=deferoutput)
13 return framing.serverreactor(deferoutput=deferoutput)
14
14
15 def sendframes(reactor, gen):
15 def sendframes(reactor, gen):
16 """Send a generator of frame bytearray to a reactor.
16 """Send a generator of frame bytearray to a reactor.
17
17
18 Emits a generator of results from ``onframerecv()`` calls.
18 Emits a generator of results from ``onframerecv()`` calls.
19 """
19 """
20 for frame in gen:
20 for frame in gen:
21 header = framing.parseheader(frame)
21 header = framing.parseheader(frame)
22 payload = frame[framing.FRAME_HEADER_SIZE:]
22 payload = frame[framing.FRAME_HEADER_SIZE:]
23 assert len(payload) == header.length
23 assert len(payload) == header.length
24
24
25 yield reactor.onframerecv(framing.frame(header.requestid,
25 yield reactor.onframerecv(framing.frame(header.requestid,
26 header.streamid,
26 header.streamid,
27 header.streamflags,
27 header.streamflags,
28 header.typeid,
28 header.typeid,
29 header.flags,
29 header.flags,
30 payload))
30 payload))
31
31
32 def sendcommandframes(reactor, stream, rid, cmd, args, datafh=None):
32 def sendcommandframes(reactor, stream, rid, cmd, args, datafh=None):
33 """Generate frames to run a command and send them to a reactor."""
33 """Generate frames to run a command and send them to a reactor."""
34 return sendframes(reactor,
34 return sendframes(reactor,
35 framing.createcommandframes(stream, rid, cmd, args,
35 framing.createcommandframes(stream, rid, cmd, args,
36 datafh))
36 datafh))
37
37
38 class FrameHumanStringTests(unittest.TestCase):
39 def testbasic(self):
40 self.assertEqual(ffs(b'1 1 0 1 0 '),
41 b'\x00\x00\x00\x01\x00\x01\x00\x10')
42
43 self.assertEqual(ffs(b'2 4 0 1 0 '),
44 b'\x00\x00\x00\x02\x00\x04\x00\x10')
45
46 self.assertEqual(ffs(b'2 4 0 1 0 foo'),
47 b'\x03\x00\x00\x02\x00\x04\x00\x10foo')
48
49 def testcborint(self):
50 self.assertEqual(ffs(b'1 1 0 1 0 cbor:15'),
51 b'\x01\x00\x00\x01\x00\x01\x00\x10\x0f')
52
53 self.assertEqual(ffs(b'1 1 0 1 0 cbor:42'),
54 b'\x02\x00\x00\x01\x00\x01\x00\x10\x18*')
55
56 self.assertEqual(ffs(b'1 1 0 1 0 cbor:1048576'),
57 b'\x05\x00\x00\x01\x00\x01\x00\x10\x1a'
58 b'\x00\x10\x00\x00')
59
60 self.assertEqual(ffs(b'1 1 0 1 0 cbor:0'),
61 b'\x01\x00\x00\x01\x00\x01\x00\x10\x00')
62
63 self.assertEqual(ffs(b'1 1 0 1 0 cbor:-1'),
64 b'\x01\x00\x00\x01\x00\x01\x00\x10 ')
65
66 self.assertEqual(ffs(b'1 1 0 1 0 cbor:-342542'),
67 b'\x05\x00\x00\x01\x00\x01\x00\x10:\x00\x05:\r')
68
69 def testcborstrings(self):
70 # String literals should be unicode.
71 self.assertEqual(ffs(b"1 1 0 1 0 cbor:'foo'"),
72 b'\x04\x00\x00\x01\x00\x01\x00\x10cfoo')
73
74 self.assertEqual(ffs(b"1 1 0 1 0 cbor:b'foo'"),
75 b'\x04\x00\x00\x01\x00\x01\x00\x10Cfoo')
76
77 self.assertEqual(ffs(b"1 1 0 1 0 cbor:u'foo'"),
78 b'\x04\x00\x00\x01\x00\x01\x00\x10cfoo')
79
80 def testcborlists(self):
81 self.assertEqual(ffs(b"1 1 0 1 0 cbor:[None, True, False, 42, b'foo']"),
82 b'\n\x00\x00\x01\x00\x01\x00\x10\x85\xf6\xf5\xf4'
83 b'\x18*Cfoo')
84
85 def testcbordicts(self):
86 self.assertEqual(ffs(b"1 1 0 1 0 "
87 b"cbor:{b'foo': b'val1', b'bar': b'val2'}"),
88 b'\x13\x00\x00\x01\x00\x01\x00\x10\xa2'
89 b'CbarDval2CfooDval1')
90
38 class FrameTests(unittest.TestCase):
91 class FrameTests(unittest.TestCase):
39 def testdataexactframesize(self):
92 def testdataexactframesize(self):
40 data = util.bytesio(b'x' * framing.DEFAULT_MAX_FRAME_SIZE)
93 data = util.bytesio(b'x' * framing.DEFAULT_MAX_FRAME_SIZE)
41
94
42 stream = framing.stream(1)
95 stream = framing.stream(1)
43 frames = list(framing.createcommandframes(stream, 1, b'command',
96 frames = list(framing.createcommandframes(stream, 1, b'command',
44 {}, data))
97 {}, data))
45 self.assertEqual(frames, [
98 self.assertEqual(frames, [
46 ffs(b'1 1 stream-begin command-name have-data command'),
99 ffs(b'1 1 stream-begin command-name have-data command'),
47 ffs(b'1 1 0 command-data continuation %s' % data.getvalue()),
100 ffs(b'1 1 0 command-data continuation %s' % data.getvalue()),
48 ffs(b'1 1 0 command-data eos ')
101 ffs(b'1 1 0 command-data eos ')
49 ])
102 ])
50
103
51 def testdatamultipleframes(self):
104 def testdatamultipleframes(self):
52 data = util.bytesio(b'x' * (framing.DEFAULT_MAX_FRAME_SIZE + 1))
105 data = util.bytesio(b'x' * (framing.DEFAULT_MAX_FRAME_SIZE + 1))
53
106
54 stream = framing.stream(1)
107 stream = framing.stream(1)
55 frames = list(framing.createcommandframes(stream, 1, b'command', {},
108 frames = list(framing.createcommandframes(stream, 1, b'command', {},
56 data))
109 data))
57 self.assertEqual(frames, [
110 self.assertEqual(frames, [
58 ffs(b'1 1 stream-begin command-name have-data command'),
111 ffs(b'1 1 stream-begin command-name have-data command'),
59 ffs(b'1 1 0 command-data continuation %s' % (
112 ffs(b'1 1 0 command-data continuation %s' % (
60 b'x' * framing.DEFAULT_MAX_FRAME_SIZE)),
113 b'x' * framing.DEFAULT_MAX_FRAME_SIZE)),
61 ffs(b'1 1 0 command-data eos x'),
114 ffs(b'1 1 0 command-data eos x'),
62 ])
115 ])
63
116
64 def testargsanddata(self):
117 def testargsanddata(self):
65 data = util.bytesio(b'x' * 100)
118 data = util.bytesio(b'x' * 100)
66
119
67 stream = framing.stream(1)
120 stream = framing.stream(1)
68 frames = list(framing.createcommandframes(stream, 1, b'command', {
121 frames = list(framing.createcommandframes(stream, 1, b'command', {
69 b'key1': b'key1value',
122 b'key1': b'key1value',
70 b'key2': b'key2value',
123 b'key2': b'key2value',
71 b'key3': b'key3value',
124 b'key3': b'key3value',
72 }, data))
125 }, data))
73
126
74 self.assertEqual(frames, [
127 self.assertEqual(frames, [
75 ffs(b'1 1 stream-begin command-name have-args|have-data command'),
128 ffs(b'1 1 stream-begin command-name have-args|have-data command'),
76 ffs(br'1 1 0 command-argument 0 \x04\x00\x09\x00key1key1value'),
129 ffs(br'1 1 0 command-argument 0 \x04\x00\x09\x00key1key1value'),
77 ffs(br'1 1 0 command-argument 0 \x04\x00\x09\x00key2key2value'),
130 ffs(br'1 1 0 command-argument 0 \x04\x00\x09\x00key2key2value'),
78 ffs(br'1 1 0 command-argument eoa \x04\x00\x09\x00key3key3value'),
131 ffs(br'1 1 0 command-argument eoa \x04\x00\x09\x00key3key3value'),
79 ffs(b'1 1 0 command-data eos %s' % data.getvalue()),
132 ffs(b'1 1 0 command-data eos %s' % data.getvalue()),
80 ])
133 ])
81
134
82 def testtextoutputexcessiveargs(self):
135 def testtextoutputexcessiveargs(self):
83 """At most 255 formatting arguments are allowed."""
136 """At most 255 formatting arguments are allowed."""
84 with self.assertRaisesRegexp(ValueError,
137 with self.assertRaisesRegexp(ValueError,
85 'cannot use more than 255 formatting'):
138 'cannot use more than 255 formatting'):
86 args = [b'x' for i in range(256)]
139 args = [b'x' for i in range(256)]
87 list(framing.createtextoutputframe(None, 1,
140 list(framing.createtextoutputframe(None, 1,
88 [(b'bleh', args, [])]))
141 [(b'bleh', args, [])]))
89
142
90 def testtextoutputexcessivelabels(self):
143 def testtextoutputexcessivelabels(self):
91 """At most 255 labels are allowed."""
144 """At most 255 labels are allowed."""
92 with self.assertRaisesRegexp(ValueError,
145 with self.assertRaisesRegexp(ValueError,
93 'cannot use more than 255 labels'):
146 'cannot use more than 255 labels'):
94 labels = [b'l' for i in range(256)]
147 labels = [b'l' for i in range(256)]
95 list(framing.createtextoutputframe(None, 1,
148 list(framing.createtextoutputframe(None, 1,
96 [(b'bleh', [], labels)]))
149 [(b'bleh', [], labels)]))
97
150
98 def testtextoutputformattingstringtype(self):
151 def testtextoutputformattingstringtype(self):
99 """Formatting string must be bytes."""
152 """Formatting string must be bytes."""
100 with self.assertRaisesRegexp(ValueError, 'must use bytes formatting '):
153 with self.assertRaisesRegexp(ValueError, 'must use bytes formatting '):
101 list(framing.createtextoutputframe(None, 1, [
154 list(framing.createtextoutputframe(None, 1, [
102 (b'foo'.decode('ascii'), [], [])]))
155 (b'foo'.decode('ascii'), [], [])]))
103
156
104 def testtextoutputargumentbytes(self):
157 def testtextoutputargumentbytes(self):
105 with self.assertRaisesRegexp(ValueError, 'must use bytes for argument'):
158 with self.assertRaisesRegexp(ValueError, 'must use bytes for argument'):
106 list(framing.createtextoutputframe(None, 1, [
159 list(framing.createtextoutputframe(None, 1, [
107 (b'foo', [b'foo'.decode('ascii')], [])]))
160 (b'foo', [b'foo'.decode('ascii')], [])]))
108
161
109 def testtextoutputlabelbytes(self):
162 def testtextoutputlabelbytes(self):
110 with self.assertRaisesRegexp(ValueError, 'must use bytes for labels'):
163 with self.assertRaisesRegexp(ValueError, 'must use bytes for labels'):
111 list(framing.createtextoutputframe(None, 1, [
164 list(framing.createtextoutputframe(None, 1, [
112 (b'foo', [], [b'foo'.decode('ascii')])]))
165 (b'foo', [], [b'foo'.decode('ascii')])]))
113
166
114 def testtextoutputtoolongformatstring(self):
167 def testtextoutputtoolongformatstring(self):
115 with self.assertRaisesRegexp(ValueError,
168 with self.assertRaisesRegexp(ValueError,
116 'formatting string cannot be longer than'):
169 'formatting string cannot be longer than'):
117 list(framing.createtextoutputframe(None, 1, [
170 list(framing.createtextoutputframe(None, 1, [
118 (b'x' * 65536, [], [])]))
171 (b'x' * 65536, [], [])]))
119
172
120 def testtextoutputtoolongargumentstring(self):
173 def testtextoutputtoolongargumentstring(self):
121 with self.assertRaisesRegexp(ValueError,
174 with self.assertRaisesRegexp(ValueError,
122 'argument string cannot be longer than'):
175 'argument string cannot be longer than'):
123 list(framing.createtextoutputframe(None, 1, [
176 list(framing.createtextoutputframe(None, 1, [
124 (b'bleh', [b'x' * 65536], [])]))
177 (b'bleh', [b'x' * 65536], [])]))
125
178
126 def testtextoutputtoolonglabelstring(self):
179 def testtextoutputtoolonglabelstring(self):
127 with self.assertRaisesRegexp(ValueError,
180 with self.assertRaisesRegexp(ValueError,
128 'label string cannot be longer than'):
181 'label string cannot be longer than'):
129 list(framing.createtextoutputframe(None, 1, [
182 list(framing.createtextoutputframe(None, 1, [
130 (b'bleh', [], [b'x' * 65536])]))
183 (b'bleh', [], [b'x' * 65536])]))
131
184
132 def testtextoutput1simpleatom(self):
185 def testtextoutput1simpleatom(self):
133 stream = framing.stream(1)
186 stream = framing.stream(1)
134 val = list(framing.createtextoutputframe(stream, 1, [
187 val = list(framing.createtextoutputframe(stream, 1, [
135 (b'foo', [], [])]))
188 (b'foo', [], [])]))
136
189
137 self.assertEqual(val, [
190 self.assertEqual(val, [
138 ffs(br'1 1 stream-begin text-output 0 \x03\x00\x00\x00foo'),
191 ffs(br'1 1 stream-begin text-output 0 \x03\x00\x00\x00foo'),
139 ])
192 ])
140
193
141 def testtextoutput2simpleatoms(self):
194 def testtextoutput2simpleatoms(self):
142 stream = framing.stream(1)
195 stream = framing.stream(1)
143 val = list(framing.createtextoutputframe(stream, 1, [
196 val = list(framing.createtextoutputframe(stream, 1, [
144 (b'foo', [], []),
197 (b'foo', [], []),
145 (b'bar', [], []),
198 (b'bar', [], []),
146 ]))
199 ]))
147
200
148 self.assertEqual(val, [
201 self.assertEqual(val, [
149 ffs(br'1 1 stream-begin text-output 0 '
202 ffs(br'1 1 stream-begin text-output 0 '
150 br'\x03\x00\x00\x00foo\x03\x00\x00\x00bar'),
203 br'\x03\x00\x00\x00foo\x03\x00\x00\x00bar'),
151 ])
204 ])
152
205
153 def testtextoutput1arg(self):
206 def testtextoutput1arg(self):
154 stream = framing.stream(1)
207 stream = framing.stream(1)
155 val = list(framing.createtextoutputframe(stream, 1, [
208 val = list(framing.createtextoutputframe(stream, 1, [
156 (b'foo %s', [b'val1'], []),
209 (b'foo %s', [b'val1'], []),
157 ]))
210 ]))
158
211
159 self.assertEqual(val, [
212 self.assertEqual(val, [
160 ffs(br'1 1 stream-begin text-output 0 '
213 ffs(br'1 1 stream-begin text-output 0 '
161 br'\x06\x00\x00\x01\x04\x00foo %sval1'),
214 br'\x06\x00\x00\x01\x04\x00foo %sval1'),
162 ])
215 ])
163
216
164 def testtextoutput2arg(self):
217 def testtextoutput2arg(self):
165 stream = framing.stream(1)
218 stream = framing.stream(1)
166 val = list(framing.createtextoutputframe(stream, 1, [
219 val = list(framing.createtextoutputframe(stream, 1, [
167 (b'foo %s %s', [b'val', b'value'], []),
220 (b'foo %s %s', [b'val', b'value'], []),
168 ]))
221 ]))
169
222
170 self.assertEqual(val, [
223 self.assertEqual(val, [
171 ffs(br'1 1 stream-begin text-output 0 '
224 ffs(br'1 1 stream-begin text-output 0 '
172 br'\x09\x00\x00\x02\x03\x00\x05\x00foo %s %svalvalue'),
225 br'\x09\x00\x00\x02\x03\x00\x05\x00foo %s %svalvalue'),
173 ])
226 ])
174
227
175 def testtextoutput1label(self):
228 def testtextoutput1label(self):
176 stream = framing.stream(1)
229 stream = framing.stream(1)
177 val = list(framing.createtextoutputframe(stream, 1, [
230 val = list(framing.createtextoutputframe(stream, 1, [
178 (b'foo', [], [b'label']),
231 (b'foo', [], [b'label']),
179 ]))
232 ]))
180
233
181 self.assertEqual(val, [
234 self.assertEqual(val, [
182 ffs(br'1 1 stream-begin text-output 0 '
235 ffs(br'1 1 stream-begin text-output 0 '
183 br'\x03\x00\x01\x00\x05foolabel'),
236 br'\x03\x00\x01\x00\x05foolabel'),
184 ])
237 ])
185
238
186 def testargandlabel(self):
239 def testargandlabel(self):
187 stream = framing.stream(1)
240 stream = framing.stream(1)
188 val = list(framing.createtextoutputframe(stream, 1, [
241 val = list(framing.createtextoutputframe(stream, 1, [
189 (b'foo %s', [b'arg'], [b'label']),
242 (b'foo %s', [b'arg'], [b'label']),
190 ]))
243 ]))
191
244
192 self.assertEqual(val, [
245 self.assertEqual(val, [
193 ffs(br'1 1 stream-begin text-output 0 '
246 ffs(br'1 1 stream-begin text-output 0 '
194 br'\x06\x00\x01\x01\x05\x03\x00foo %slabelarg'),
247 br'\x06\x00\x01\x01\x05\x03\x00foo %slabelarg'),
195 ])
248 ])
196
249
197 class ServerReactorTests(unittest.TestCase):
250 class ServerReactorTests(unittest.TestCase):
198 def _sendsingleframe(self, reactor, f):
251 def _sendsingleframe(self, reactor, f):
199 results = list(sendframes(reactor, [f]))
252 results = list(sendframes(reactor, [f]))
200 self.assertEqual(len(results), 1)
253 self.assertEqual(len(results), 1)
201
254
202 return results[0]
255 return results[0]
203
256
204 def assertaction(self, res, expected):
257 def assertaction(self, res, expected):
205 self.assertIsInstance(res, tuple)
258 self.assertIsInstance(res, tuple)
206 self.assertEqual(len(res), 2)
259 self.assertEqual(len(res), 2)
207 self.assertIsInstance(res[1], dict)
260 self.assertIsInstance(res[1], dict)
208 self.assertEqual(res[0], expected)
261 self.assertEqual(res[0], expected)
209
262
210 def assertframesequal(self, frames, framestrings):
263 def assertframesequal(self, frames, framestrings):
211 expected = [ffs(s) for s in framestrings]
264 expected = [ffs(s) for s in framestrings]
212 self.assertEqual(list(frames), expected)
265 self.assertEqual(list(frames), expected)
213
266
214 def test1framecommand(self):
267 def test1framecommand(self):
215 """Receiving a command in a single frame yields request to run it."""
268 """Receiving a command in a single frame yields request to run it."""
216 reactor = makereactor()
269 reactor = makereactor()
217 stream = framing.stream(1)
270 stream = framing.stream(1)
218 results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {}))
271 results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {}))
219 self.assertEqual(len(results), 1)
272 self.assertEqual(len(results), 1)
220 self.assertaction(results[0], 'runcommand')
273 self.assertaction(results[0], 'runcommand')
221 self.assertEqual(results[0][1], {
274 self.assertEqual(results[0][1], {
222 'requestid': 1,
275 'requestid': 1,
223 'command': b'mycommand',
276 'command': b'mycommand',
224 'args': {},
277 'args': {},
225 'data': None,
278 'data': None,
226 })
279 })
227
280
228 result = reactor.oninputeof()
281 result = reactor.oninputeof()
229 self.assertaction(result, 'noop')
282 self.assertaction(result, 'noop')
230
283
231 def test1argument(self):
284 def test1argument(self):
232 reactor = makereactor()
285 reactor = makereactor()
233 stream = framing.stream(1)
286 stream = framing.stream(1)
234 results = list(sendcommandframes(reactor, stream, 41, b'mycommand',
287 results = list(sendcommandframes(reactor, stream, 41, b'mycommand',
235 {b'foo': b'bar'}))
288 {b'foo': b'bar'}))
236 self.assertEqual(len(results), 2)
289 self.assertEqual(len(results), 2)
237 self.assertaction(results[0], 'wantframe')
290 self.assertaction(results[0], 'wantframe')
238 self.assertaction(results[1], 'runcommand')
291 self.assertaction(results[1], 'runcommand')
239 self.assertEqual(results[1][1], {
292 self.assertEqual(results[1][1], {
240 'requestid': 41,
293 'requestid': 41,
241 'command': b'mycommand',
294 'command': b'mycommand',
242 'args': {b'foo': b'bar'},
295 'args': {b'foo': b'bar'},
243 'data': None,
296 'data': None,
244 })
297 })
245
298
246 def testmultiarguments(self):
299 def testmultiarguments(self):
247 reactor = makereactor()
300 reactor = makereactor()
248 stream = framing.stream(1)
301 stream = framing.stream(1)
249 results = list(sendcommandframes(reactor, stream, 1, b'mycommand',
302 results = list(sendcommandframes(reactor, stream, 1, b'mycommand',
250 {b'foo': b'bar', b'biz': b'baz'}))
303 {b'foo': b'bar', b'biz': b'baz'}))
251 self.assertEqual(len(results), 3)
304 self.assertEqual(len(results), 3)
252 self.assertaction(results[0], 'wantframe')
305 self.assertaction(results[0], 'wantframe')
253 self.assertaction(results[1], 'wantframe')
306 self.assertaction(results[1], 'wantframe')
254 self.assertaction(results[2], 'runcommand')
307 self.assertaction(results[2], 'runcommand')
255 self.assertEqual(results[2][1], {
308 self.assertEqual(results[2][1], {
256 'requestid': 1,
309 'requestid': 1,
257 'command': b'mycommand',
310 'command': b'mycommand',
258 'args': {b'foo': b'bar', b'biz': b'baz'},
311 'args': {b'foo': b'bar', b'biz': b'baz'},
259 'data': None,
312 'data': None,
260 })
313 })
261
314
262 def testsimplecommanddata(self):
315 def testsimplecommanddata(self):
263 reactor = makereactor()
316 reactor = makereactor()
264 stream = framing.stream(1)
317 stream = framing.stream(1)
265 results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {},
318 results = list(sendcommandframes(reactor, stream, 1, b'mycommand', {},
266 util.bytesio(b'data!')))
319 util.bytesio(b'data!')))
267 self.assertEqual(len(results), 2)
320 self.assertEqual(len(results), 2)
268 self.assertaction(results[0], 'wantframe')
321 self.assertaction(results[0], 'wantframe')
269 self.assertaction(results[1], 'runcommand')
322 self.assertaction(results[1], 'runcommand')
270 self.assertEqual(results[1][1], {
323 self.assertEqual(results[1][1], {
271 'requestid': 1,
324 'requestid': 1,
272 'command': b'mycommand',
325 'command': b'mycommand',
273 'args': {},
326 'args': {},
274 'data': b'data!',
327 'data': b'data!',
275 })
328 })
276
329
277 def testmultipledataframes(self):
330 def testmultipledataframes(self):
278 frames = [
331 frames = [
279 ffs(b'1 1 stream-begin command-name have-data mycommand'),
332 ffs(b'1 1 stream-begin command-name have-data mycommand'),
280 ffs(b'1 1 0 command-data continuation data1'),
333 ffs(b'1 1 0 command-data continuation data1'),
281 ffs(b'1 1 0 command-data continuation data2'),
334 ffs(b'1 1 0 command-data continuation data2'),
282 ffs(b'1 1 0 command-data eos data3'),
335 ffs(b'1 1 0 command-data eos data3'),
283 ]
336 ]
284
337
285 reactor = makereactor()
338 reactor = makereactor()
286 results = list(sendframes(reactor, frames))
339 results = list(sendframes(reactor, frames))
287 self.assertEqual(len(results), 4)
340 self.assertEqual(len(results), 4)
288 for i in range(3):
341 for i in range(3):
289 self.assertaction(results[i], 'wantframe')
342 self.assertaction(results[i], 'wantframe')
290 self.assertaction(results[3], 'runcommand')
343 self.assertaction(results[3], 'runcommand')
291 self.assertEqual(results[3][1], {
344 self.assertEqual(results[3][1], {
292 'requestid': 1,
345 'requestid': 1,
293 'command': b'mycommand',
346 'command': b'mycommand',
294 'args': {},
347 'args': {},
295 'data': b'data1data2data3',
348 'data': b'data1data2data3',
296 })
349 })
297
350
298 def testargumentanddata(self):
351 def testargumentanddata(self):
299 frames = [
352 frames = [
300 ffs(b'1 1 stream-begin command-name have-args|have-data command'),
353 ffs(b'1 1 stream-begin command-name have-args|have-data command'),
301 ffs(br'1 1 0 command-argument 0 \x03\x00\x03\x00keyval'),
354 ffs(br'1 1 0 command-argument 0 \x03\x00\x03\x00keyval'),
302 ffs(br'1 1 0 command-argument eoa \x03\x00\x03\x00foobar'),
355 ffs(br'1 1 0 command-argument eoa \x03\x00\x03\x00foobar'),
303 ffs(b'1 1 0 command-data continuation value1'),
356 ffs(b'1 1 0 command-data continuation value1'),
304 ffs(b'1 1 0 command-data eos value2'),
357 ffs(b'1 1 0 command-data eos value2'),
305 ]
358 ]
306
359
307 reactor = makereactor()
360 reactor = makereactor()
308 results = list(sendframes(reactor, frames))
361 results = list(sendframes(reactor, frames))
309
362
310 self.assertaction(results[-1], 'runcommand')
363 self.assertaction(results[-1], 'runcommand')
311 self.assertEqual(results[-1][1], {
364 self.assertEqual(results[-1][1], {
312 'requestid': 1,
365 'requestid': 1,
313 'command': b'command',
366 'command': b'command',
314 'args': {
367 'args': {
315 b'key': b'val',
368 b'key': b'val',
316 b'foo': b'bar',
369 b'foo': b'bar',
317 },
370 },
318 'data': b'value1value2',
371 'data': b'value1value2',
319 })
372 })
320
373
321 def testunexpectedcommandargument(self):
374 def testunexpectedcommandargument(self):
322 """Command argument frame when not running a command is an error."""
375 """Command argument frame when not running a command is an error."""
323 result = self._sendsingleframe(
376 result = self._sendsingleframe(
324 makereactor(), ffs(b'1 1 stream-begin command-argument 0 ignored'))
377 makereactor(), ffs(b'1 1 stream-begin command-argument 0 ignored'))
325 self.assertaction(result, 'error')
378 self.assertaction(result, 'error')
326 self.assertEqual(result[1], {
379 self.assertEqual(result[1], {
327 'message': b'expected command frame; got 2',
380 'message': b'expected command frame; got 2',
328 })
381 })
329
382
330 def testunexpectedcommandargumentreceiving(self):
383 def testunexpectedcommandargumentreceiving(self):
331 """Same as above but the command is receiving."""
384 """Same as above but the command is receiving."""
332 results = list(sendframes(makereactor(), [
385 results = list(sendframes(makereactor(), [
333 ffs(b'1 1 stream-begin command-name have-data command'),
386 ffs(b'1 1 stream-begin command-name have-data command'),
334 ffs(b'1 1 0 command-argument eoa ignored'),
387 ffs(b'1 1 0 command-argument eoa ignored'),
335 ]))
388 ]))
336
389
337 self.assertaction(results[1], 'error')
390 self.assertaction(results[1], 'error')
338 self.assertEqual(results[1][1], {
391 self.assertEqual(results[1][1], {
339 'message': b'received command argument frame for request that is '
392 'message': b'received command argument frame for request that is '
340 b'not expecting arguments: 1',
393 b'not expecting arguments: 1',
341 })
394 })
342
395
343 def testunexpectedcommanddata(self):
396 def testunexpectedcommanddata(self):
344 """Command argument frame when not running a command is an error."""
397 """Command argument frame when not running a command is an error."""
345 result = self._sendsingleframe(
398 result = self._sendsingleframe(
346 makereactor(), ffs(b'1 1 stream-begin command-data 0 ignored'))
399 makereactor(), ffs(b'1 1 stream-begin command-data 0 ignored'))
347 self.assertaction(result, 'error')
400 self.assertaction(result, 'error')
348 self.assertEqual(result[1], {
401 self.assertEqual(result[1], {
349 'message': b'expected command frame; got 3',
402 'message': b'expected command frame; got 3',
350 })
403 })
351
404
352 def testunexpectedcommanddatareceiving(self):
405 def testunexpectedcommanddatareceiving(self):
353 """Same as above except the command is receiving."""
406 """Same as above except the command is receiving."""
354 results = list(sendframes(makereactor(), [
407 results = list(sendframes(makereactor(), [
355 ffs(b'1 1 stream-begin command-name have-args command'),
408 ffs(b'1 1 stream-begin command-name have-args command'),
356 ffs(b'1 1 0 command-data eos ignored'),
409 ffs(b'1 1 0 command-data eos ignored'),
357 ]))
410 ]))
358
411
359 self.assertaction(results[1], 'error')
412 self.assertaction(results[1], 'error')
360 self.assertEqual(results[1][1], {
413 self.assertEqual(results[1][1], {
361 'message': b'received command data frame for request that is not '
414 'message': b'received command data frame for request that is not '
362 b'expecting data: 1',
415 b'expecting data: 1',
363 })
416 })
364
417
365 def testmissingcommandframeflags(self):
418 def testmissingcommandframeflags(self):
366 """Command name frame must have flags set."""
419 """Command name frame must have flags set."""
367 result = self._sendsingleframe(
420 result = self._sendsingleframe(
368 makereactor(), ffs(b'1 1 stream-begin command-name 0 command'))
421 makereactor(), ffs(b'1 1 stream-begin command-name 0 command'))
369 self.assertaction(result, 'error')
422 self.assertaction(result, 'error')
370 self.assertEqual(result[1], {
423 self.assertEqual(result[1], {
371 'message': b'missing frame flags on command frame',
424 'message': b'missing frame flags on command frame',
372 })
425 })
373
426
374 def testconflictingrequestidallowed(self):
427 def testconflictingrequestidallowed(self):
375 """Multiple fully serviced commands with same request ID is allowed."""
428 """Multiple fully serviced commands with same request ID is allowed."""
376 reactor = makereactor()
429 reactor = makereactor()
377 results = []
430 results = []
378 outstream = reactor.makeoutputstream()
431 outstream = reactor.makeoutputstream()
379 results.append(self._sendsingleframe(
432 results.append(self._sendsingleframe(
380 reactor, ffs(b'1 1 stream-begin command-name eos command')))
433 reactor, ffs(b'1 1 stream-begin command-name eos command')))
381 result = reactor.onbytesresponseready(outstream, 1, b'response1')
434 result = reactor.onbytesresponseready(outstream, 1, b'response1')
382 self.assertaction(result, 'sendframes')
435 self.assertaction(result, 'sendframes')
383 list(result[1]['framegen'])
436 list(result[1]['framegen'])
384 results.append(self._sendsingleframe(
437 results.append(self._sendsingleframe(
385 reactor, ffs(b'1 1 0 command-name eos command')))
438 reactor, ffs(b'1 1 0 command-name eos command')))
386 result = reactor.onbytesresponseready(outstream, 1, b'response2')
439 result = reactor.onbytesresponseready(outstream, 1, b'response2')
387 self.assertaction(result, 'sendframes')
440 self.assertaction(result, 'sendframes')
388 list(result[1]['framegen'])
441 list(result[1]['framegen'])
389 results.append(self._sendsingleframe(
442 results.append(self._sendsingleframe(
390 reactor, ffs(b'1 1 0 command-name eos command')))
443 reactor, ffs(b'1 1 0 command-name eos command')))
391 result = reactor.onbytesresponseready(outstream, 1, b'response3')
444 result = reactor.onbytesresponseready(outstream, 1, b'response3')
392 self.assertaction(result, 'sendframes')
445 self.assertaction(result, 'sendframes')
393 list(result[1]['framegen'])
446 list(result[1]['framegen'])
394
447
395 for i in range(3):
448 for i in range(3):
396 self.assertaction(results[i], 'runcommand')
449 self.assertaction(results[i], 'runcommand')
397 self.assertEqual(results[i][1], {
450 self.assertEqual(results[i][1], {
398 'requestid': 1,
451 'requestid': 1,
399 'command': b'command',
452 'command': b'command',
400 'args': {},
453 'args': {},
401 'data': None,
454 'data': None,
402 })
455 })
403
456
404 def testconflictingrequestid(self):
457 def testconflictingrequestid(self):
405 """Request ID for new command matching in-flight command is illegal."""
458 """Request ID for new command matching in-flight command is illegal."""
406 results = list(sendframes(makereactor(), [
459 results = list(sendframes(makereactor(), [
407 ffs(b'1 1 stream-begin command-name have-args command'),
460 ffs(b'1 1 stream-begin command-name have-args command'),
408 ffs(b'1 1 0 command-name eos command'),
461 ffs(b'1 1 0 command-name eos command'),
409 ]))
462 ]))
410
463
411 self.assertaction(results[0], 'wantframe')
464 self.assertaction(results[0], 'wantframe')
412 self.assertaction(results[1], 'error')
465 self.assertaction(results[1], 'error')
413 self.assertEqual(results[1][1], {
466 self.assertEqual(results[1][1], {
414 'message': b'request with ID 1 already received',
467 'message': b'request with ID 1 already received',
415 })
468 })
416
469
417 def testinterleavedcommands(self):
470 def testinterleavedcommands(self):
418 results = list(sendframes(makereactor(), [
471 results = list(sendframes(makereactor(), [
419 ffs(b'1 1 stream-begin command-name have-args command1'),
472 ffs(b'1 1 stream-begin command-name have-args command1'),
420 ffs(b'3 1 0 command-name have-args command3'),
473 ffs(b'3 1 0 command-name have-args command3'),
421 ffs(br'1 1 0 command-argument 0 \x03\x00\x03\x00foobar'),
474 ffs(br'1 1 0 command-argument 0 \x03\x00\x03\x00foobar'),
422 ffs(br'3 1 0 command-argument 0 \x03\x00\x03\x00bizbaz'),
475 ffs(br'3 1 0 command-argument 0 \x03\x00\x03\x00bizbaz'),
423 ffs(br'3 1 0 command-argument eoa \x03\x00\x03\x00keyval'),
476 ffs(br'3 1 0 command-argument eoa \x03\x00\x03\x00keyval'),
424 ffs(br'1 1 0 command-argument eoa \x04\x00\x03\x00key1val'),
477 ffs(br'1 1 0 command-argument eoa \x04\x00\x03\x00key1val'),
425 ]))
478 ]))
426
479
427 self.assertEqual([t[0] for t in results], [
480 self.assertEqual([t[0] for t in results], [
428 'wantframe',
481 'wantframe',
429 'wantframe',
482 'wantframe',
430 'wantframe',
483 'wantframe',
431 'wantframe',
484 'wantframe',
432 'runcommand',
485 'runcommand',
433 'runcommand',
486 'runcommand',
434 ])
487 ])
435
488
436 self.assertEqual(results[4][1], {
489 self.assertEqual(results[4][1], {
437 'requestid': 3,
490 'requestid': 3,
438 'command': 'command3',
491 'command': 'command3',
439 'args': {b'biz': b'baz', b'key': b'val'},
492 'args': {b'biz': b'baz', b'key': b'val'},
440 'data': None,
493 'data': None,
441 })
494 })
442 self.assertEqual(results[5][1], {
495 self.assertEqual(results[5][1], {
443 'requestid': 1,
496 'requestid': 1,
444 'command': 'command1',
497 'command': 'command1',
445 'args': {b'foo': b'bar', b'key1': b'val'},
498 'args': {b'foo': b'bar', b'key1': b'val'},
446 'data': None,
499 'data': None,
447 })
500 })
448
501
def testmissingargumentframe(self):
    """A command-name frame advertising arguments that never arrive.

    The reactor does not yet model a request stalled waiting on argument
    data, so the only observable behavior is that it keeps requesting
    more frames.
    """
    incoming = [ffs(b'1 1 stream-begin command-name have-args command')]

    outcome = list(sendframes(makereactor(), incoming))
    self.assertaction(outcome[0], 'wantframe')
459
512
def testincompleteargumentname(self):
    """An argument frame whose declared name length exceeds the payload.

    The reactor must reject the frame with a malformed-frame error
    rather than reading past the end of the buffer.
    """
    incoming = [
        ffs(b'1 1 stream-begin command-name have-args command1'),
        # Name length \x04 but only 3 bytes ('foo') follow the header.
        ffs(br'1 1 0 command-argument eoa \x04\x00\xde\xadfoo'),
    ]

    outcome = list(sendframes(makereactor(), incoming))
    self.assertEqual(len(outcome), 2)

    first, second = outcome
    self.assertaction(first, 'wantframe')
    self.assertaction(second, 'error')
    self.assertEqual(second[1], {
        'message': b'malformed argument frame: partial argument name',
    })
474
527
def testincompleteargumentvalue(self):
    """An argument frame whose declared value length exceeds the payload.

    Mirrors testincompleteargumentname, but the truncation happens in
    the value portion instead of the name.
    """
    incoming = [
        ffs(b'1 1 stream-begin command-name have-args command'),
        # Value length \xaa\xaa is far larger than the bytes provided.
        ffs(br'1 1 0 command-argument eoa \x03\x00\xaa\xaafoopartialvalue'),
    ]

    outcome = list(sendframes(makereactor(), incoming))
    self.assertEqual(len(outcome), 2)

    first, second = outcome
    self.assertaction(first, 'wantframe')
    self.assertaction(second, 'error')
    self.assertEqual(second[1], {
        'message': b'malformed argument frame: partial argument value',
    })
489
542
def testmissingcommanddataframe(self):
    """Request 1 advertises data frames that never arrive.

    The reactor doesn't currently handle partially received commands,
    so request 1 is silently left pending while request 2 still runs.
    """
    incoming = [
        ffs(b'1 1 stream-begin command-name have-data command1'),
        ffs(b'3 1 0 command-name eos command2'),
    ]

    outcome = list(sendframes(makereactor(), incoming))
    self.assertEqual(len(outcome), 2)
    self.assertaction(outcome[0], 'wantframe')
    self.assertaction(outcome[1], 'runcommand')
501
554
def testmissingcommanddataframeflags(self):
    """A command-data frame carrying flag value 0 is rejected.

    Data frames must set either a continuation or an end-of-stream
    flag; a flagless data frame is a protocol violation.
    """
    incoming = [
        ffs(b'1 1 stream-begin command-name have-data command1'),
        ffs(b'1 1 0 command-data 0 data'),
    ]

    outcome = list(sendframes(makereactor(), incoming))
    self.assertEqual(len(outcome), 2)

    first, second = outcome
    self.assertaction(first, 'wantframe')
    self.assertaction(second, 'error')
    self.assertEqual(second[1], {
        'message': b'command data frame without flags',
    })
514
567
def testframefornonreceivingrequest(self):
    """Receiving a frame for a command that is not receiving is illegal."""
    incoming = [
        ffs(b'1 1 stream-begin command-name eos command1'),
        ffs(b'3 1 0 command-name have-data command3'),
        # Request ID 5 was never opened, so this frame is bogus.
        ffs(b'5 1 0 command-argument eoa ignored'),
    ]

    outcome = list(sendframes(makereactor(), incoming))

    bad = outcome[2]
    self.assertaction(bad, 'error')
    self.assertEqual(bad[1], {
        'message': b'received frame for request that is not receiving: 5',
    })
526
579
def testsimpleresponse(self):
    """A bytes response that fits in one frame yields a single eos frame."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'mycommand', {}))

    fromserver = rx.makeoutputstream()
    action = rx.onbytesresponseready(fromserver, 1, b'response')

    self.assertaction(action, 'sendframes')
    expected = [b'1 2 stream-begin bytes-response eos response']
    self.assertframesequal(action[1]['framegen'], expected)
539
592
def testmultiframeresponse(self):
    """A bytes response larger than one frame is split across frames."""
    # One full frame of 'x' plus a 100-byte tail of 'y'.
    head = b'x' * framing.DEFAULT_MAX_FRAME_SIZE
    tail = b'y' * 100

    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'mycommand', {}))

    fromserver = rx.makeoutputstream()
    action = rx.onbytesresponseready(fromserver, 1, head + tail)

    self.assertaction(action, 'sendframes')
    self.assertframesequal(action[1]['framegen'], [
        b'1 2 stream-begin bytes-response continuation %s' % head,
        b'1 2 0 bytes-response eos %s' % tail,
    ])
556
609
def testapplicationerror(self):
    """An application error is emitted as an error-response frame."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'mycommand', {}))

    fromserver = rx.makeoutputstream()
    action = rx.onapplicationerror(fromserver, 1, b'some message')

    self.assertaction(action, 'sendframes')
    expected = [b'1 2 stream-begin error-response application some message']
    self.assertframesequal(action[1]['framegen'], expected)
568
621
def test1commanddeferresponse(self):
    """Responses when in deferred output mode are delayed until EOF."""
    rx = makereactor(deferoutput=True)
    toserver = framing.stream(1)
    outcome = list(sendcommandframes(rx, toserver, 1, b'mycommand', {}))
    self.assertEqual(len(outcome), 1)
    self.assertaction(outcome[0], 'runcommand')

    fromserver = rx.makeoutputstream()

    # Registering a response is a no-op while output is deferred...
    action = rx.onbytesresponseready(fromserver, 1, b'response')
    self.assertaction(action, 'noop')

    # ...and the frames only flush once input EOF is signaled.
    action = rx.oninputeof()
    self.assertaction(action, 'sendframes')
    self.assertframesequal(action[1]['framegen'], [
        b'1 2 stream-begin bytes-response eos response',
    ])
586
639
def testmultiplecommanddeferresponse(self):
    """Multiple deferred responses are all flushed together at EOF."""
    rx = makereactor(deferoutput=True)
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    list(sendcommandframes(rx, toserver, 3, b'command2', {}))

    fromserver = rx.makeoutputstream()

    # Both registrations are buffered while output is deferred.
    self.assertaction(
        rx.onbytesresponseready(fromserver, 1, b'response1'), 'noop')
    self.assertaction(
        rx.onbytesresponseready(fromserver, 3, b'response2'), 'noop')

    action = rx.oninputeof()
    self.assertaction(action, 'sendframes')
    self.assertframesequal(action[1]['framegen'], [
        b'1 2 stream-begin bytes-response eos response1',
        b'3 2 0 bytes-response eos response2'
    ])
604
657
def testrequestidtracking(self):
    """Deferred frames are emitted in response-registration order.

    Responses registered out of request-ID order (3, 1, 5) come back
    out in that same registration order, not sorted by request ID.
    """
    rx = makereactor(deferoutput=True)
    toserver = framing.stream(1)
    for rid, name in ((1, b'command1'), (3, b'command2'), (5, b'command3')):
        list(sendcommandframes(rx, toserver, rid, name, {}))

    # Register results for commands out of order.
    fromserver = rx.makeoutputstream()
    rx.onbytesresponseready(fromserver, 3, b'response3')
    rx.onbytesresponseready(fromserver, 1, b'response1')
    rx.onbytesresponseready(fromserver, 5, b'response5')

    action = rx.oninputeof()
    self.assertaction(action, 'sendframes')
    self.assertframesequal(action[1]['framegen'], [
        b'3 2 stream-begin bytes-response eos response3',
        b'1 2 0 bytes-response eos response1',
        b'5 2 0 bytes-response eos response5',
    ])
625
678
def testduplicaterequestonactivecommand(self):
    """Receiving a request ID that matches a request that isn't finished."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'command1', {}))

    # Re-using ID 1 while the first command is still active is an error.
    outcome = list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    self.assertaction(outcome[0], 'error')
    self.assertEqual(outcome[0][1], {
        'message': b'request with ID 1 is already active',
    })
637
690
def testduplicaterequestonactivecommandnosend(self):
    """Same as above but we've registered a response but haven't sent it."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    fromserver = rx.makeoutputstream()
    rx.onbytesresponseready(fromserver, 1, b'response')

    # The response is registered but its frames were never generated,
    # so from the reactor's perspective request 1 is still active.
    outcome = list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    self.assertaction(outcome[0], 'error')
    self.assertEqual(outcome[0][1], {
        'message': b'request with ID 1 is already active',
    })
654
707
def testduplicaterequestargumentframe(self):
    """Variant on above except we sent an argument frame instead of name."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'command', {}))

    outcome = list(sendframes(rx, [
        ffs(b'3 1 stream-begin command-name have-args command'),
        ffs(b'1 1 0 command-argument 0 ignored'),
    ]))
    self.assertaction(outcome[0], 'wantframe')
    self.assertaction(outcome[1], 'error')
    # NOTE(review): unlike the sibling tests, this expected message is a
    # native str rather than a bytes literal — looks unintentional, but
    # preserved as-is; confirm against the reactor's error type.
    self.assertEqual(outcome[1][1], {
        'message': 'received frame for request that is still active: 1',
    })
669
722
def testduplicaterequestaftersend(self):
    """We can use a duplicate request ID after we've sent the response."""
    rx = makereactor()
    toserver = framing.stream(1)
    list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    fromserver = rx.makeoutputstream()
    action = rx.onbytesresponseready(fromserver, 1, b'response')
    # Draining the generator marks the response as actually sent.
    list(action[1]['framegen'])

    outcome = list(sendcommandframes(rx, toserver, 1, b'command1', {}))
    self.assertaction(outcome[0], 'runcommand')
681
734
if __name__ == '__main__':
    # Run through Mercurial's silent test runner so output stays stable
    # for the .t test harness.
    import silenttestrunner
    silenttestrunner.main(__name__)
General Comments 0
You need to be logged in to leave comments. Login now