##// END OF EJS Templates
pycompat: make fewer assumptions about sys.executable...
Rodrigo Damazio Bovendorp -
r42723:49998d5b default
parent child Browse files
Show More
@@ -1,639 +1,641 b''
1 # chgserver.py - command server extension for cHg
1 # chgserver.py - command server extension for cHg
2 #
2 #
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """command server extension for cHg
8 """command server extension for cHg
9
9
10 'S' channel (read/write)
10 'S' channel (read/write)
11 propagate ui.system() request to client
11 propagate ui.system() request to client
12
12
13 'attachio' command
13 'attachio' command
14 attach client's stdio passed by sendmsg()
14 attach client's stdio passed by sendmsg()
15
15
16 'chdir' command
16 'chdir' command
17 change current directory
17 change current directory
18
18
19 'setenv' command
19 'setenv' command
20 replace os.environ completely
20 replace os.environ completely
21
21
22 'setumask' command (DEPRECATED)
22 'setumask' command (DEPRECATED)
23 'setumask2' command
23 'setumask2' command
24 set umask
24 set umask
25
25
26 'validate' command
26 'validate' command
27 reload the config and check if the server is up to date
27 reload the config and check if the server is up to date
28
28
29 Config
29 Config
30 ------
30 ------
31
31
32 ::
32 ::
33
33
34 [chgserver]
34 [chgserver]
35 # how long (in seconds) should an idle chg server exit
35 # how long (in seconds) should an idle chg server exit
36 idletimeout = 3600
36 idletimeout = 3600
37
37
38 # whether to skip config or env change checks
38 # whether to skip config or env change checks
39 skiphash = False
39 skiphash = False
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import hashlib
44 import hashlib
45 import inspect
45 import inspect
46 import os
46 import os
47 import re
47 import re
48 import socket
48 import socket
49 import stat
49 import stat
50 import struct
50 import struct
51 import time
51 import time
52
52
53 from .i18n import _
53 from .i18n import _
54
54
55 from . import (
55 from . import (
56 commandserver,
56 commandserver,
57 encoding,
57 encoding,
58 error,
58 error,
59 extensions,
59 extensions,
60 node,
60 node,
61 pycompat,
61 pycompat,
62 util,
62 util,
63 )
63 )
64
64
65 from .utils import (
65 from .utils import (
66 procutil,
66 procutil,
67 stringutil,
67 stringutil,
68 )
68 )
69
69
def _hashlist(items):
    """Return the sha1 hexdigest of a list's pretty-printed form."""
    serialized = stringutil.pprint(items)
    return node.hex(hashlib.sha1(serialized).digest())
73
73
74 # sensitive config sections affecting confighash
74 # sensitive config sections affecting confighash
75 _configsections = [
75 _configsections = [
76 'alias', # affects global state commands.table
76 'alias', # affects global state commands.table
77 'eol', # uses setconfig('eol', ...)
77 'eol', # uses setconfig('eol', ...)
78 'extdiff', # uisetup will register new commands
78 'extdiff', # uisetup will register new commands
79 'extensions',
79 'extensions',
80 ]
80 ]
81
81
82 _configsectionitems = [
82 _configsectionitems = [
83 ('commands', 'show.aliasprefix'), # show.py reads it in extsetup
83 ('commands', 'show.aliasprefix'), # show.py reads it in extsetup
84 ]
84 ]
85
85
86 # sensitive environment variables affecting confighash
86 # sensitive environment variables affecting confighash
87 _envre = re.compile(br'''\A(?:
87 _envre = re.compile(br'''\A(?:
88 CHGHG
88 CHGHG
89 |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
89 |HG(?:DEMANDIMPORT|EMITWARNINGS|MODULEPOLICY|PROF|RCPATH)?
90 |HG(?:ENCODING|PLAIN).*
90 |HG(?:ENCODING|PLAIN).*
91 |LANG(?:UAGE)?
91 |LANG(?:UAGE)?
92 |LC_.*
92 |LC_.*
93 |LD_.*
93 |LD_.*
94 |PATH
94 |PATH
95 |PYTHON.*
95 |PYTHON.*
96 |TERM(?:INFO)?
96 |TERM(?:INFO)?
97 |TZ
97 |TZ
98 )\Z''', re.X)
98 )\Z''', re.X)
99
99
def _confighash(ui):
    """return a quick hash for detecting config/env changes

    confighash is the hash of sensitive config items and environment variables.

    for chgserver, it is designed that once confighash changes, the server is
    not qualified to serve its client and should redirect the client to a new
    server. different from mtimehash, confighash change will not mark the
    server outdated and exit since the user can have different configs at the
    same time.
    """
    sectionitems = []
    for section in _configsections:
        sectionitems.append(ui.configitems(section))
    for section, name in _configsectionitems:
        sectionitems.append(ui.config(section, name))
    sectionhash = _hashlist(sectionitems)
    # If $CHGHG is set, the change to $HG should not trigger a new chg server
    ignored = {'HG'} if 'CHGHG' in encoding.environ else set()
    envitems = sorted((k, v) for k, v in encoding.environ.iteritems()
                      if _envre.match(k) and k not in ignored)
    envhash = _hashlist(envitems)
    return sectionhash[:6] + envhash[:6]
126
126
def _getmtimepaths(ui):
    """get a list of paths that should be checked to detect change

    The list will include:
    - extensions (will not cover all files for complex extensions)
    - mercurial/__version__.py
    - python binary
    """
    modules = [m for n, m in extensions.extensions(ui)]
    try:
        from . import __version__
        modules.append(__version__)
    except ImportError:
        pass
    files = []
    # sys.executable may be unset (e.g. embedded interpreters); only track
    # the python binary when we actually know its path
    if pycompat.sysexecutable:
        files.append(pycompat.sysexecutable)
    for mod in modules:
        try:
            files.append(pycompat.fsencode(inspect.getabsfile(mod)))
        except TypeError:
            # built-in or dynamically created module without a source file
            pass
    return sorted(set(files))
148
150
def _mtimehash(paths):
    """return a quick hash for detecting file changes

    mtimehash calls stat on given paths and calculate a hash based on size and
    mtime of each file. mtimehash does not read file content because reading is
    expensive. therefore it's not 100% reliable for detecting content changes.
    it's possible to return different hashes for same file contents.
    it's also possible to return a same hash for different file contents for
    some carefully crafted situation.

    for chgserver, it is designed that once mtimehash changes, the server is
    considered outdated immediately and should no longer provide service.

    mtimehash is not included in confighash because we only know the paths of
    extensions after importing them (there is imp.find_module but that faces
    race conditions). We need to calculate confighash without importing.
    """
    def trystat(path):
        try:
            st = os.stat(path)
        except OSError:
            # could be ENOENT, EPERM etc. not fatal in any case
            return None
        return (st[stat.ST_MTIME], st.st_size)
    return _hashlist(map(trystat, paths))[:12]
174
176
class hashstate(object):
    """a structure storing confighash, mtimehash, paths used for mtimehash"""

    def __init__(self, confighash, mtimehash, mtimepaths):
        self.confighash, self.mtimehash, self.mtimepaths = (
            confighash, mtimehash, mtimepaths)

    @staticmethod
    def fromui(ui, mtimepaths=None):
        """Build a hashstate by probing *ui*'s config and the mtime paths."""
        if mtimepaths is None:
            mtimepaths = _getmtimepaths(ui)
        confighash = _confighash(ui)
        mtimehash = _mtimehash(mtimepaths)
        ui.log('cmdserver', 'confighash = %s mtimehash = %s\n',
               confighash, mtimehash)
        return hashstate(confighash, mtimehash, mtimepaths)
191
193
def _newchgui(srcui, csystem, attachio):
    """Return a ui derived from *srcui* whose system()/pager requests are
    forwarded to the chg client through *csystem* when possible."""
    class chgui(srcui.__class__):
        def __init__(self, src=None):
            super(chgui, self).__init__(src)
            # inherit the channel from a copied ui, or install the new one
            self._csystem = getattr(src, '_csystem', csystem) if src \
                            else csystem

        def _runsystem(self, cmd, environ, cwd, out):
            # fallback to the original system method if
            #  a. the output stream is not stdout (e.g. stderr, cStringIO),
            #  b. or stdout is redirected by protectfinout(),
            # because the chg client is not aware of these situations and
            # will behave differently (i.e. write to stdout).
            usefallback = (out is not self.fout
                           or not util.safehasattr(self.fout, 'fileno')
                           or self.fout.fileno() != procutil.stdout.fileno()
                           or self._finoutredirected)
            if usefallback:
                return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
            self.flush()
            return self._csystem(cmd, procutil.shellenviron(environ), cwd)

        def _runpager(self, cmd, env=None):
            self._csystem(cmd, procutil.shellenviron(env), type='pager',
                          cmdtable={'attachio': attachio})
            return True

    return chgui(srcui)
221
223
def _loadnewui(srcui, args, cdebug):
    """Create a freshly-loaded (ui, lui) pair from *srcui* and command args.

    The new ui inherits srcui's stdio, environ and chg system channel, then
    re-reads config (including --config/--cwd/-R from *args*) so the server
    can compare hashes against its own state.  Returns (newui, newlui).
    """
    from . import dispatch  # avoid cycle

    newui = srcui.__class__.load()
    # carry over the live client streams and environment onto the new ui
    for a in ['fin', 'fout', 'ferr', 'environ']:
        setattr(newui, a, getattr(srcui, a))
    if util.safehasattr(srcui, '_csystem'):
        newui._csystem = srcui._csystem

    # command line args
    options = dispatch._earlyparseopts(newui, args)
    dispatch._parseconfig(newui, options['config'])

    # stolen from tortoisehg.util.copydynamicconfig()
    for section, name, value in srcui.walkconfig():
        source = srcui.configsource(section, name)
        if ':' in source or source == '--config' or source.startswith('$'):
            # path:line or command line, or environ
            continue
        # only copy values set programmatically (e.g. via setconfig)
        newui.setconfig(section, name, value, source)

    # load wd and repo config, copied from dispatch.py
    cwd = options['cwd']
    cwd = cwd and os.path.realpath(cwd) or None
    rpath = options['repository']
    path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)

    extensions.populateui(newui)
    commandserver.setuplogging(newui, fp=cdebug)
    if newui is not newlui:
        # repo-local ui exists separately; give it the same extension
        # hooks and logging setup
        extensions.populateui(newlui)
        commandserver.setuplogging(newlui, fp=cdebug)

    return (newui, newlui)
256
258
class channeledsystem(object):
    """Propagate ui.system() request in the following format:

    payload length (unsigned int),
    type, '\0',
    cmd, '\0',
    cwd, '\0',
    envkey, '=', val, '\0',
    ...
    envkey, '=', val

    if type == 'system', waits for:

    exitcode length (unsigned int),
    exitcode (int)

    if type == 'pager', repetitively waits for a command name ending with '\n'
    and executes it defined by cmdtable, or exits the loop if the command name
    is empty.
    """
    def __init__(self, in_, out, channel):
        # in_/out: streams shared with the chg client; channel: 1-byte id
        # written in each frame header (e.g. 'S')
        self.in_ = in_
        self.out = out
        self.channel = channel

    def __call__(self, cmd, environ, cwd=None, type='system', cmdtable=None):
        # frame payload: NUL-separated [type, cmd, cwd, k=v, k=v, ...]
        args = [type, procutil.quotecommand(cmd), os.path.abspath(cwd or '.')]
        args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
        data = '\0'.join(args)
        # header is big-endian: channel byte + payload length
        self.out.write(struct.pack('>cI', self.channel, len(data)))
        self.out.write(data)
        self.out.flush()

        if type == 'system':
            # client replies with a 4-byte length (must be 4) then the
            # command's exit code as a signed big-endian int
            length = self.in_.read(4)
            length, = struct.unpack('>I', length)
            if length != 4:
                raise error.Abort(_('invalid response'))
            rc, = struct.unpack('>i', self.in_.read(4))
            return rc
        elif type == 'pager':
            # loop on newline-terminated command names until an empty one
            while True:
                cmd = self.in_.readline()[:-1]
                if not cmd:
                    break
                if cmdtable and cmd in cmdtable:
                    cmdtable[cmd]()
                else:
                    raise error.Abort(_('unexpected command: %s') % cmd)
        else:
            raise error.ProgrammingError('invalid S channel type: %s' % type)
308
310
309 _iochannels = [
311 _iochannels = [
310 # server.ch, ui.fp, mode
312 # server.ch, ui.fp, mode
311 ('cin', 'fin', r'rb'),
313 ('cin', 'fin', r'rb'),
312 ('cout', 'fout', r'wb'),
314 ('cout', 'fout', r'wb'),
313 ('cerr', 'ferr', r'wb'),
315 ('cerr', 'ferr', r'wb'),
314 ]
316 ]
315
317
class chgcmdserver(commandserver.server):
    """Command server speaking the chg protocol.

    Extends the plain command server with chg-specific capabilities
    (attachio, chdir, setenv, setumask/setumask2, validate, and optionally
    setprocname) and with the fd juggling needed to borrow the client's
    stdio for the duration of a command.
    """
    def __init__(self, ui, repo, fin, fout, sock, prereposetups,
                 hashstate, baseaddress):
        super(chgcmdserver, self).__init__(
            _newchgui(ui, channeledsystem(fin, fout, 'S'), self.attachio),
            repo, fin, fout, prereposetups)
        self.clientsock = sock
        self._ioattached = False
        self._oldios = []  # original (self.ch, ui.fp, fd) before "attachio"
        self.hashstate = hashstate
        self.baseaddress = baseaddress
        if hashstate is not None:
            # copy before mutating: 'capabilities' is shared at class level
            self.capabilities = self.capabilities.copy()
            self.capabilities['validate'] = chgcmdserver.validate

    def cleanup(self):
        super(chgcmdserver, self).cleanup()
        # dispatch._runcatch() does not flush outputs if exception is not
        # handled by dispatch._dispatch()
        self.ui.flush()
        self._restoreio()
        self._ioattached = False

    def attachio(self):
        """Attach to client's stdio passed via unix domain socket; all
        channels except cresult will no longer be used
        """
        # tell client to sendmsg() with 1-byte payload, which makes it
        # distinctive from "attachio\n" command consumed by client.read()
        self.clientsock.sendall(struct.pack('>cI', 'I', 1))
        clientfds = util.recvfds(self.clientsock.fileno())
        self.ui.log('chgserver', 'received fds: %r\n', clientfds)

        ui = self.ui
        ui.flush()
        self._saveio()
        for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
            assert fd > 0
            fp = getattr(ui, fn)
            # point our stream's fd at the client's; then the received fd
            # itself is no longer needed
            os.dup2(fd, fp.fileno())
            os.close(fd)
            if self._ioattached:
                continue
            # reset buffering mode when client is first attached. as we want
            # to see output immediately on pager, the mode stays unchanged
            # when client re-attached. ferr is unchanged because it should
            # be unbuffered no matter if it is a tty or not.
            if fn == 'ferr':
                newfp = fp
            else:
                # make it line buffered explicitly because the default is
                # decided on first write(), where fout could be a pager.
                if fp.isatty():
                    bufsize = 1  # line buffered
                else:
                    bufsize = -1  # system default
                newfp = os.fdopen(fp.fileno(), mode, bufsize)
            setattr(ui, fn, newfp)
            setattr(self, cn, newfp)

        self._ioattached = True
        # report how many fds were wired up, on the result channel
        self.cresult.write(struct.pack('>i', len(clientfds)))

    def _saveio(self):
        # remember (channel, stream, dup'ed fd) so _restoreio can undo
        # attachio; no-op if already saved
        if self._oldios:
            return
        ui = self.ui
        for cn, fn, _mode in _iochannels:
            ch = getattr(self, cn)
            fp = getattr(ui, fn)
            fd = os.dup(fp.fileno())
            self._oldios.append((ch, fp, fd))

    def _restoreio(self):
        # undo attachio: detach client stdio and restore the saved fds
        ui = self.ui
        for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
            newfp = getattr(ui, fn)
            # close newfp while it's associated with client; otherwise it
            # would be closed when newfp is deleted
            if newfp is not fp:
                newfp.close()
            # restore original fd: fp is open again
            os.dup2(fd, fp.fileno())
            os.close(fd)
            setattr(self, cn, ch)
            setattr(ui, fn, fp)
        del self._oldios[:]

    def validate(self):
        """Reload the config and check if the server is up to date

        Read a list of '\0' separated arguments.
        Write a non-empty list of '\0' separated instruction strings or '\0'
        if the list is empty.
        An instruction string could be either:
            - "unlink $path", the client should unlink the path to stop the
              outdated server.
            - "redirect $path", the client should attempt to connect to $path
              first. If it does not work, start a new server. It implies
              "reconnect".
            - "exit $n", the client should exit directly with code n.
              This may happen if we cannot parse the config.
            - "reconnect", the client should close the connection and
              reconnect.
        If neither "reconnect" nor "redirect" is included in the instruction
        list, the client can continue with this server after completing all
        the instructions.
        """
        from . import dispatch  # avoid cycle

        args = self._readlist()
        try:
            self.ui, lui = _loadnewui(self.ui, args, self.cdebug)
        except error.ParseError as inst:
            dispatch._formatparse(self.ui.warn, inst)
            self.ui.flush()
            self.cresult.write('exit 255')
            return
        except error.Abort as inst:
            self.ui.error(_("abort: %s\n") % inst)
            if inst.hint:
                self.ui.error(_("(%s)\n") % inst.hint)
            self.ui.flush()
            self.cresult.write('exit 255')
            return
        newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
        insts = []
        if newhash.mtimehash != self.hashstate.mtimehash:
            addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
            insts.append('unlink %s' % addr)
            # mtimehash is empty if one or more extensions fail to load.
            # to be compatible with hg, still serve the client this time.
            if self.hashstate.mtimehash:
                insts.append('reconnect')
        if newhash.confighash != self.hashstate.confighash:
            addr = _hashaddress(self.baseaddress, newhash.confighash)
            insts.append('redirect %s' % addr)
        self.ui.log('chgserver', 'validate: %s\n', stringutil.pprint(insts))
        self.cresult.write('\0'.join(insts) or '\0')

    def chdir(self):
        """Change current directory

        Note that the behavior of --cwd option is bit different from this.
        It does not affect --config parameter.
        """
        path = self._readstr()
        if not path:
            return
        self.ui.log('chgserver', 'chdir to %r\n', path)
        os.chdir(path)

    def setumask(self):
        """Change umask (DEPRECATED)"""
        # BUG: this does not follow the message frame structure, but kept for
        # backward compatibility with old chg clients for some time
        self._setumask(self._read(4))

    def setumask2(self):
        """Change umask"""
        data = self._readstr()
        if len(data) != 4:
            raise ValueError('invalid mask length in setumask2 request')
        self._setumask(data)

    def _setumask(self, data):
        # data: 4-byte big-endian unsigned mask value
        mask = struct.unpack('>I', data)[0]
        self.ui.log('chgserver', 'setumask %r\n', mask)
        os.umask(mask)

    def runcommand(self):
        # pager may be attached within the runcommand session, which should
        # be detached at the end of the session. otherwise the pager wouldn't
        # receive EOF.
        globaloldios = self._oldios
        self._oldios = []
        try:
            return super(chgcmdserver, self).runcommand()
        finally:
            self._restoreio()
            self._oldios = globaloldios

    def setenv(self):
        """Clear and update os.environ

        Note that not all variables can make an effect on the running process.
        """
        l = self._readlist()
        try:
            newenv = dict(s.split('=', 1) for s in l)
        except ValueError:
            raise ValueError('unexpected value in setenv request')
        self.ui.log('chgserver', 'setenv: %r\n', sorted(newenv.keys()))
        encoding.environ.clear()
        encoding.environ.update(newenv)

    # class-level command table; 'validate' is added per-instance when
    # hashstate is available (see __init__)
    capabilities = commandserver.server.capabilities.copy()
    capabilities.update({'attachio': attachio,
                         'chdir': chdir,
                         'runcommand': runcommand,
                         'setenv': setenv,
                         'setumask': setumask,
                         'setumask2': setumask2})

    # setprocname is only offered when the platform's procutil supports it
    if util.safehasattr(procutil, 'setprocname'):
        def setprocname(self):
            """Change process title"""
            name = self._readstr()
            self.ui.log('chgserver', 'setprocname: %r\n', name)
            procutil.setprocname(name)
        capabilities['setprocname'] = setprocname
527
529
528 def _tempaddress(address):
530 def _tempaddress(address):
529 return '%s.%d.tmp' % (address, os.getpid())
531 return '%s.%d.tmp' % (address, os.getpid())
530
532
531 def _hashaddress(address, hashstr):
533 def _hashaddress(address, hashstr):
532 # if the basename of address contains '.', use only the left part. this
534 # if the basename of address contains '.', use only the left part. this
533 # makes it possible for the client to pass 'server.tmp$PID' and follow by
535 # makes it possible for the client to pass 'server.tmp$PID' and follow by
534 # an atomic rename to avoid locking when spawning new servers.
536 # an atomic rename to avoid locking when spawning new servers.
535 dirname, basename = os.path.split(address)
537 dirname, basename = os.path.split(address)
536 basename = basename.split('.', 1)[0]
538 basename = basename.split('.', 1)[0]
537 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
539 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
538
540
class chgunixservicehandler(object):
    """Set of operations for chg services"""

    # how often the service loop polls shouldexit(), in seconds
    pollinterval = 1 # [sec]

    def __init__(self, ui):
        self.ui = ui
        # server exits after this many seconds without client activity
        self._idletimeout = ui.configint('chgserver', 'idletimeout')
        self._lastactive = time.time()

    def bindsocket(self, sock, address):
        """Bind *sock* to the (possibly hash-qualified) *address* and listen."""
        self._inithashstate(address)
        self._checkextensions()
        self._bind(sock)
        self._createsymlink()
        # no "listening at" message should be printed to simulate hg behavior

    def _inithashstate(self, address):
        # compute the confighash-suffixed socket path, unless disabled
        self._baseaddress = address
        if self.ui.configbool('chgserver', 'skiphash'):
            self._hashstate = None
            self._realaddress = address
            return
        self._hashstate = hashstate.fromui(self.ui)
        self._realaddress = _hashaddress(address, self._hashstate.confighash)

    def _checkextensions(self):
        if not self._hashstate:
            return
        if extensions.notloaded():
            # one or more extensions failed to load. mtimehash becomes
            # meaningless because we do not know the paths of those extensions.
            # set mtimehash to an illegal hash value to invalidate the server.
            self._hashstate.mtimehash = ''

    def _bind(self, sock):
        # use a unique temp address so we can stat the file and do ownership
        # check later
        tempaddress = _tempaddress(self._realaddress)
        util.bindunixsocket(sock, tempaddress)
        self._socketstat = os.stat(tempaddress)
        sock.listen(socket.SOMAXCONN)
        # rename will replace the old socket file if exists atomically. the
        # old server will detect ownership change and exit.
        util.rename(tempaddress, self._realaddress)

    def _createsymlink(self):
        # expose the hashed socket under the plain base address via symlink
        if self._baseaddress == self._realaddress:
            return
        tempaddress = _tempaddress(self._baseaddress)
        os.symlink(os.path.basename(self._realaddress), tempaddress)
        util.rename(tempaddress, self._baseaddress)

    def _issocketowner(self):
        # compare inode and mtime because the path may have been replaced
        # by a newer server via the atomic rename in _bind()
        try:
            st = os.stat(self._realaddress)
            return (st.st_ino == self._socketstat.st_ino and
                    st[stat.ST_MTIME] == self._socketstat[stat.ST_MTIME])
        except OSError:
            return False

    def unlinksocket(self, address):
        if not self._issocketowner():
            return
        # it is possible to have a race condition here that we may
        # remove another server's socket file. but that's okay
        # since that server will detect and exit automatically and
        # the client will start a new server on demand.
        util.tryunlink(self._realaddress)

    def shouldexit(self):
        """Return True when the socket was taken over or we have been idle."""
        if not self._issocketowner():
            self.ui.log(b'chgserver', b'%s is not owned, exiting.\n',
                        self._realaddress)
            return True
        if time.time() - self._lastactive > self._idletimeout:
            self.ui.log(b'chgserver', b'being idle too long. exiting.\n')
            return True
        return False

    def newconnection(self):
        # a client connected: reset the idle clock
        self._lastactive = time.time()

    def createcmdserver(self, repo, conn, fin, fout, prereposetups):
        return chgcmdserver(self.ui, repo, fin, fout, conn, prereposetups,
                            self._hashstate, self._baseaddress)
625
627
def chgunixservice(ui, repo, opts):
    """Create the unix-domain chg service handler and start serving."""
    # CHGINTERNALMARK is set by chg client. It is an indication of things are
    # started by chg so other code can do things accordingly, like disabling
    # demandimport or detecting chg client started by chg client. When executed
    # here, CHGINTERNALMARK is no longer useful and hence dropped to make
    # environ cleaner.
    encoding.environ.pop('CHGINTERNALMARK', None)

    if repo:
        # one chgserver can serve multiple repos. drop repo information
        ui.setconfig('bundle', 'mainreporoot', '', 'repo')
    handler = chgunixservicehandler(ui)
    return commandserver.unixforkingservice(ui, repo=None, opts=opts,
                                            handler=handler)
@@ -1,3481 +1,3481 b''
1 # debugcommands.py - command processing for debug* commands
1 # debugcommands.py - command processing for debug* commands
2 #
2 #
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2016 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import codecs
10 import codecs
11 import collections
11 import collections
12 import difflib
12 import difflib
13 import errno
13 import errno
14 import operator
14 import operator
15 import os
15 import os
16 import random
16 import random
17 import re
17 import re
18 import socket
18 import socket
19 import ssl
19 import ssl
20 import stat
20 import stat
21 import string
21 import string
22 import subprocess
22 import subprocess
23 import sys
23 import sys
24 import time
24 import time
25
25
26 from .i18n import _
26 from .i18n import _
27 from .node import (
27 from .node import (
28 bin,
28 bin,
29 hex,
29 hex,
30 nullhex,
30 nullhex,
31 nullid,
31 nullid,
32 nullrev,
32 nullrev,
33 short,
33 short,
34 )
34 )
35 from . import (
35 from . import (
36 bundle2,
36 bundle2,
37 changegroup,
37 changegroup,
38 cmdutil,
38 cmdutil,
39 color,
39 color,
40 context,
40 context,
41 copies,
41 copies,
42 dagparser,
42 dagparser,
43 encoding,
43 encoding,
44 error,
44 error,
45 exchange,
45 exchange,
46 extensions,
46 extensions,
47 filemerge,
47 filemerge,
48 filesetlang,
48 filesetlang,
49 formatter,
49 formatter,
50 hg,
50 hg,
51 httppeer,
51 httppeer,
52 localrepo,
52 localrepo,
53 lock as lockmod,
53 lock as lockmod,
54 logcmdutil,
54 logcmdutil,
55 merge as mergemod,
55 merge as mergemod,
56 obsolete,
56 obsolete,
57 obsutil,
57 obsutil,
58 phases,
58 phases,
59 policy,
59 policy,
60 pvec,
60 pvec,
61 pycompat,
61 pycompat,
62 registrar,
62 registrar,
63 repair,
63 repair,
64 revlog,
64 revlog,
65 revset,
65 revset,
66 revsetlang,
66 revsetlang,
67 scmutil,
67 scmutil,
68 setdiscovery,
68 setdiscovery,
69 simplemerge,
69 simplemerge,
70 sshpeer,
70 sshpeer,
71 sslutil,
71 sslutil,
72 streamclone,
72 streamclone,
73 templater,
73 templater,
74 treediscovery,
74 treediscovery,
75 upgrade,
75 upgrade,
76 url as urlmod,
76 url as urlmod,
77 util,
77 util,
78 vfs as vfsmod,
78 vfs as vfsmod,
79 wireprotoframing,
79 wireprotoframing,
80 wireprotoserver,
80 wireprotoserver,
81 wireprotov2peer,
81 wireprotov2peer,
82 )
82 )
83 from .utils import (
83 from .utils import (
84 cborutil,
84 cborutil,
85 compression,
85 compression,
86 dateutil,
86 dateutil,
87 procutil,
87 procutil,
88 stringutil,
88 stringutil,
89 )
89 )
90
90
91 from .revlogutils import (
91 from .revlogutils import (
92 deltas as deltautil
92 deltas as deltautil
93 )
93 )
94
94
# convenience alias for releasing a sequence of locks
release = lockmod.release

# decorator that registers the debug* commands defined below into a table
command = registrar.command()
98
98
@command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True)
def debugancestor(ui, repo, *args):
    """find the ancestor revision of two revisions in a given index"""
    if len(args) == 3:
        # an explicit revlog index file was given; work outside any repo
        index, rev1, rev2 = args
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False), index)
        lookup = rlog.lookup
    elif len(args) == 2:
        if not repo:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
        rev1, rev2 = args
        rlog = repo.changelog
        lookup = repo.lookup
    else:
        raise error.Abort(_('either two or three arguments required'))
    anc = rlog.ancestor(lookup(rev1), lookup(rev2))
    ui.write('%d:%s\n' % (rlog.rev(anc), hex(anc)))
117
117
@command('debugapplystreamclonebundle', [], 'FILE')
def debugapplystreamclonebundle(ui, repo, fname):
    """apply a stream clone bundle file"""
    # use a context manager so the bundle file is closed even if apply()
    # raises; the original leaked the handle. Matches debugbundle's use of
    # hg.openpath as a context manager.
    with hg.openpath(ui, fname) as f:
        gen = exchange.readbundle(ui, f, fname)
        gen.apply(repo)
124
124
@command('debugbuilddag',
    [('m', 'mergeable-file', None, _('add single file mergeable changes')),
    ('o', 'overwritten-file', None, _('add single file all revs overwrite')),
    ('n', 'new-file', None, _('add new file at each rev'))],
    _('[OPTION]... [TEXT]'))
def debugbuilddag(ui, repo, text=None,
                  mergeable_file=False,
                  overwritten_file=False,
                  new_file=False):
    """builds a repo with a given DAG from scratch in the current empty repo

    The description of the DAG is read from stdin if not given on the
    command line.

    Elements:

    - "+n" is a linear run of n nodes based on the current default parent
    - "." is a single node based on the current default parent
    - "$" resets the default parent to null (implied at the start);
      otherwise the default parent is always the last node created
    - "<p" sets the default parent to the backref p
    - "*p" is a fork at parent p, which is a backref
    - "*p1/p2" is a merge of parents p1 and p2, which are backrefs
    - "/p2" is a merge of the preceding node and p2
    - ":tag" defines a local tag for the preceding node
    - "@branch" sets the named branch for subsequent nodes
    - "#...\\n" is a comment up to the end of the line

    Whitespace between the above elements is ignored.

    A backref is either

    - a number n, which references the node curr-n, where curr is the current
      node, or
    - the name of a local tag you placed earlier using ":tag", or
    - empty to denote the default parent.

    All string valued-elements are either strictly alphanumeric, or must
    be enclosed in double quotes ("..."), with "\\" as escape character.
    """

    if text is None:
        ui.status(_("reading DAG from stdin\n"))
        text = ui.fin.read()

    # this command only makes sense starting from an empty changelog
    cl = repo.changelog
    if len(cl) > 0:
        raise error.Abort(_('repository is not empty'))

    # determine number of revs in DAG (first parse pass, counting only)
    total = 0
    for type, data in dagparser.parsedag(text):
        if type == 'n':
            total += 1

    if mergeable_file:
        linesperrev = 2
        # make a file with k lines per rev
        initialmergedlines = ['%d' % i
                              for i in pycompat.xrange(0, total * linesperrev)]
        initialmergedlines.append("")

    tags = []
    progress = ui.makeprogress(_('building'), unit=_('revisions'),
                               total=total)
    # second parse pass: actually create the commits under locks + txn
    with progress, repo.wlock(), repo.lock(), repo.transaction("builddag"):
        at = -1                  # rev id of the most recently created node
        atbranch = 'default'     # branch applied to subsequent nodes
        nodeids = []             # rev id -> node hash, for backrefs
        id = 0
        progress.update(id)
        for type, data in dagparser.parsedag(text):
            if type == 'n':
                ui.note(('node %s\n' % pycompat.bytestr(data)))
                id, ps = data

                files = []
                filecontent = {}

                p2 = None
                if mergeable_file:
                    fn = "mf"
                    p1 = repo[ps[0]]
                    if len(ps) > 1:
                        # merge: three-way merge "mf" from both parents
                        p2 = repo[ps[1]]
                        pa = p1.ancestor(p2)
                        base, local, other = [x[fn].data() for x in (pa, p1,
                                                                     p2)]
                        m3 = simplemerge.Merge3Text(base, local, other)
                        ml = [l.strip() for l in m3.merge_lines()]
                        ml.append("")
                    elif at > 0:
                        ml = p1[fn].data().split("\n")
                    else:
                        ml = initialmergedlines
                    # touch this rev's own line so every rev changes "mf"
                    ml[id * linesperrev] += " r%i" % id
                    mergedtext = "\n".join(ml)
                    files.append(fn)
                    filecontent[fn] = mergedtext

                if overwritten_file:
                    fn = "of"
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id

                if new_file:
                    fn = "nf%i" % id
                    files.append(fn)
                    filecontent[fn] = "r%i\n" % id
                    if len(ps) > 1:
                        # carry p2's "nf*" files across the merge
                        if not p2:
                            p2 = repo[ps[1]]
                        for fn in p2:
                            if fn.startswith("nf"):
                                files.append(fn)
                                filecontent[fn] = p2[fn].data()

                def fctxfn(repo, cx, path):
                    # memctx callback: serve file data from filecontent
                    if path in filecontent:
                        return context.memfilectx(repo, cx, path,
                                                  filecontent[path])
                    return None

                # resolve backrefs into parent node hashes
                if len(ps) == 0 or ps[0] < 0:
                    pars = [None, None]
                elif len(ps) == 1:
                    pars = [nodeids[ps[0]], None]
                else:
                    pars = [nodeids[p] for p in ps]
                cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn,
                                    date=(id, 0),
                                    user="debugbuilddag",
                                    extra={'branch': atbranch})
                nodeid = repo.commitctx(cx)
                nodeids.append(nodeid)
                at = id
            elif type == 'l':
                id, name = data
                ui.note(('tag %s\n' % name))
                tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name))
            elif type == 'a':
                ui.note(('branch %s\n' % data))
                atbranch = data
            progress.update(id)

        if tags:
            repo.vfs.write("localtags", "".join(tags))
272
272
def _debugchangegroup(ui, gen, all=None, indent=0, **opts):
    """Dump the contents of changegroup *gen* to *ui*.

    With all=True, each delta's metadata is printed for the changelog,
    manifest and every filelog section; otherwise only changelog node
    hashes are listed.
    """
    indent_string = ' ' * indent
    if all:
        ui.write(("%sformat: id, p1, p2, cset, delta base, len(delta)\n")
                 % indent_string)

        def showchunks(named):
            # print one section header, then one line per delta in it
            ui.write("\n%s%s\n" % (indent_string, named))
            for deltadata in gen.deltaiter():
                node, p1, p2, cs, deltabase, delta, flags = deltadata
                ui.write("%s%s %s %s %s %s %d\n" %
                         (indent_string, hex(node), hex(p1), hex(p2),
                          hex(cs), hex(deltabase), len(delta)))

        chunkdata = gen.changelogheader()
        showchunks("changelog")
        chunkdata = gen.manifestheader()
        showchunks("manifest")
        # filelog sections follow until an empty header is returned
        for chunkdata in iter(gen.filelogheader, {}):
            fname = chunkdata['filename']
            showchunks(fname)
    else:
        if isinstance(gen, bundle2.unbundle20):
            raise error.Abort(_('use debugbundle2 for this file'))
        chunkdata = gen.changelogheader()
        for deltadata in gen.deltaiter():
            node, p1, p2, cs, deltabase, delta, flags = deltadata
            ui.write("%s%s\n" % (indent_string, hex(node)))
301
301
def _debugobsmarkers(ui, part, indent=0, **opts):
    """display version and markers contained in 'data'"""
    opts = pycompat.byteskwargs(opts)
    data = part.read()
    pad = ' ' * indent
    try:
        version, markers = obsolete._readmarkers(data)
    except error.UnknownVersion as exc:
        ui.write("%sunsupported version: %s (%d bytes)\n"
                 % (pad, exc.version, len(data)))
        return
    ui.write("%sversion: %d (%d bytes)\n" % (pad, version, len(data)))
    fm = ui.formatter('debugobsolete', opts)
    for rawmarker in sorted(markers):
        mark = obsutil.marker(None, rawmarker)
        fm.startitem()
        fm.plain(pad)
        cmdutil.showmarker(fm, mark)
    fm.end()
324
324
def _debugphaseheads(ui, data, indent=0):
    """display the phase heads contained in 'data'"""
    pad = ' ' * indent
    headsbyphase = phases.binarydecode(data)
    for phase in phases.allphases:
        name = phases.phasenames[phase]
        for head in headsbyphase[phase]:
            ui.write(pad)
            ui.write('%s %s\n' % (hex(head), name))
333
333
def _quasirepr(thing):
    """Render *thing* like repr(), but with dict keys in sorted order."""
    if not isinstance(thing, (dict, util.sortdict, collections.OrderedDict)):
        return pycompat.bytestr(repr(thing))
    items = (b'%s: %s' % (k, thing[k]) for k in sorted(thing))
    return '{%s}' % b', '.join(items)
339
339
def _debugbundle2(ui, gen, all=None, **opts):
    """lists the contents of a bundle2"""
    if not isinstance(gen, bundle2.unbundle20):
        raise error.Abort(_('not a bundle2 file'))
    ui.write(('Stream params: %s\n' % _quasirepr(gen.params)))
    # optional part-type filter passed down from the debugbundle command
    parttypes = opts.get(r'part_type', [])
    for part in gen.iterparts():
        if parttypes and part.type not in parttypes:
            continue
        msg = '%s -- %s (mandatory: %r)\n'
        ui.write((msg % (part.type, _quasirepr(part.params), part.mandatory)))
        # for known part payloads, print a decoded, indented dump beneath
        # the header unless --quiet was given
        if part.type == 'changegroup':
            version = part.params.get('version', '01')
            cg = changegroup.getunbundler(version, part, 'UN')
            if not ui.quiet:
                _debugchangegroup(ui, cg, all=all, indent=4, **opts)
        if part.type == 'obsmarkers':
            if not ui.quiet:
                _debugobsmarkers(ui, part, indent=4, **opts)
        if part.type == 'phase-heads':
            if not ui.quiet:
                _debugphaseheads(ui, part, indent=4)
362
362
@command('debugbundle',
         [('a', 'all', None, _('show all details')),
          ('', 'part-type', [], _('show only the named part type')),
          ('', 'spec', None, _('print the bundlespec of the bundle'))],
         _('FILE'),
         norepo=True)
def debugbundle(ui, bundlepath, all=None, spec=None, **opts):
    """lists the contents of a bundle"""
    with hg.openpath(ui, bundlepath) as f:
        if spec:
            # only report the bundlespec; don't parse the payload
            ui.write('%s\n' % exchange.getbundlespec(ui, f))
            return

        gen = exchange.readbundle(ui, f, bundlepath)
        if isinstance(gen, bundle2.unbundle20):
            return _debugbundle2(ui, gen, all=all, **opts)
        _debugchangegroup(ui, gen, all=all, **opts)
381
381
@command('debugcapabilities',
        [], _('PATH'),
        norepo=True)
def debugcapabilities(ui, path, **opts):
    """lists the capabilities of a remote peer"""
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, path)
    # Wire-protocol capabilities first...
    ui.write(('Main capabilities:\n'))
    for cap in sorted(peer.capabilities()):
        ui.write((' %s\n') % cap)
    # ...then the nested bundle2 capabilities, if the peer advertises any.
    b2caps = bundle2.bundle2caps(peer)
    if b2caps:
        ui.write(('Bundle2 capabilities:\n'))
        for capname, capvalues in sorted(b2caps.iteritems()):
            ui.write((' %s\n') % capname)
            for value in capvalues:
                ui.write((' %s\n') % value)
400
400
@command('debugcheckstate', [], '')
def debugcheckstate(ui, repo):
    """validate the correctness of the current dirstate"""
    parent1, parent2 = repo.dirstate.parents()
    m1 = repo[parent1].manifest()
    m2 = repo[parent2].manifest()
    errors = 0
    # Check every dirstate entry against the parent manifests.
    for f in repo.dirstate:
        state = repo.dirstate[f]
        if state in "nr" and f not in m1:
            ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
            errors += 1
        if state in "a" and f in m1:
            ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
            errors += 1
        if state in "m" and f not in m1 and f not in m2:
            ui.warn(_("%s in state %s, but not in either manifest\n") %
                    (f, state))
            errors += 1
    # And the reverse direction: every manifest file must be tracked.
    for f in m1:
        state = repo.dirstate[f]
        if state not in "nrm":
            ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
            errors += 1
    if errors:
        # NOTE: the local variable must not be named 'error' -- that would
        # shadow the 'error' module and make error.Abort unreachable here.
        errstr = _(".hg/dirstate inconsistent with current parent's manifest")
        raise error.Abort(errstr)
428
428
@command('debugcolor',
        [('', 'style', None, _('show all configured styles'))],
        'hg debugcolor')
def debugcolor(ui, repo, **opts):
    """show available color, effects or style"""
    ui.write(('color mode: %s\n') % stringutil.pprint(ui._colormode))
    # --style lists configured styles; the default lists raw colors/effects.
    if not opts.get(r'style'):
        return _debugdisplaycolor(ui)
    return _debugdisplaystyle(ui)
439
439
def _debugdisplaycolor(ui):
    """Print every color/effect name, rendered in that color/effect."""
    # Work on a copy so the caller's style table is left untouched.
    ui = ui.copy()
    ui._styles.clear()
    for effect in color._activeeffects(ui).keys():
        ui._styles[effect] = effect
    if ui._terminfoparams:
        # terminfo mode: custom colors come from the [color] config section.
        for key, value in ui.configitems('color'):
            if key.startswith('color.'):
                ui._styles[key] = key[6:]
            elif key.startswith('terminfo.'):
                ui._styles[key] = key[9:]
    ui.write(_('available colors:\n'))
    # sort label with a '_' after the other to group '_background' entry.
    entries = sorted(ui._styles.items(),
                     key=lambda i: ('_' in i[0], i[0], i[1]))
    for colorname, label in entries:
        ui.write(('%s\n') % colorname, label=label)
457
457
def _debugdisplaystyle(ui):
    """Print each configured style label with its effects, aligned."""
    ui.write(_('available style:\n'))
    if not ui._styles:
        return
    # Pad every label to the width of the longest one.
    width = max(len(name) for name in ui._styles)
    for label, effects in sorted(ui._styles.items()):
        ui.write('%s' % label, label=label)
        if effects:
            ui.write(': ')
            ui.write(' ' * (max(0, width - len(label))))
            ui.write(', '.join(ui.label(e, e) for e in effects.split()))
        ui.write('\n')
471
471
@command('debugcreatestreamclonebundle', [], 'FILE')
def debugcreatestreamclonebundle(ui, repo, fname):
    """create a stream clone bundle file

    Stream bundles are special bundles that are essentially archives of
    revlog files. They are commonly used for cloning very quickly.
    """
    # TODO we may want to turn this into an abort when this functionality
    # is moved into `hg bundle`.
    if phases.hassecret(repo):
        ui.warn(_('(warning: stream clone bundle will contain secret '
                  'revisions)\n'))

    requirements, chunks = streamclone.generatebundlev1(repo)
    changegroup.writechunks(ui, chunks, fname)

    ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements)))
489
489
@command('debugdag',
        [('t', 'tags', None, _('use tags as labels')),
         ('b', 'branches', None, _('annotate with branch names')),
         ('', 'dots', None, _('use dots for runs')),
         ('s', 'spaces', None, _('separate elements by spaces'))],
        _('[OPTION]... [FILE [REV]...]'),
        optionalrepo=True)
def debugdag(ui, repo, file_=None, *revs, **opts):
    """format the changelog or an index DAG as a concise textual description

    If you pass a revlog index, the revlog's DAG is emitted. If you list
    revision numbers, they get labeled in the output as rN.

    Otherwise, the changelog DAG of the current repo is emitted.
    """
    spaces = opts.get(r'spaces')
    dots = opts.get(r'dots')
    if file_:
        # A revlog index was given: walk that revlog's DAG, labeling any
        # explicitly listed revision numbers as rN.
        rlog = revlog.revlog(vfsmod.vfs(encoding.getcwd(), audit=False),
                             file_)
        wanted = {int(r) for r in revs}
        def events():
            for r in rlog:
                yield 'n', (r, [p for p in rlog.parentrevs(r) if p != -1])
                if r in wanted:
                    yield 'l', (r, "r%i" % r)
    elif repo:
        # No file: walk the changelog, optionally annotating branches/tags.
        cl = repo.changelog
        tags = opts.get(r'tags')
        branches = opts.get(r'branches')
        if tags:
            # Map each tagged revision number to its list of tag names.
            labels = {}
            for name, node in repo.tags().items():
                labels.setdefault(cl.rev(node), []).append(name)
        def events():
            b = "default"
            for r in cl:
                if branches:
                    newb = cl.read(cl.node(r))[5]['branch']
                    if newb != b:
                        # Emit an annotation when the branch changes.
                        yield 'a', newb
                        b = newb
                yield 'n', (r, [p for p in cl.parentrevs(r) if p != -1])
                if tags:
                    for name in labels.get(r, []):
                        yield 'l', (r, name)
    else:
        raise error.Abort(_('need repo for changelog dag'))

    for line in dagparser.dagtextlines(events(),
                                       addspaces=spaces,
                                       wraplabels=True,
                                       wrapannotations=True,
                                       wrapnonlinear=dots,
                                       usedots=dots,
                                       maxlinewidth=70):
        ui.write(line)
        ui.write("\n")
552
552
@command('debugdata', cmdutil.debugrevlogopts, _('-c|-m|FILE REV'))
def debugdata(ui, repo, file_, rev=None, **opts):
    """dump the contents of a data file revision"""
    opts = pycompat.byteskwargs(opts)
    if opts.get('changelog') or opts.get('manifest') or opts.get('dir'):
        # With -c/-m/--dir the only positional argument is the revision,
        # so shift it over and forbid an explicit file argument.
        if rev is not None:
            raise error.CommandError('debugdata', _('invalid arguments'))
        file_, rev = None, file_
    elif rev is None:
        raise error.CommandError('debugdata', _('invalid arguments'))
    store = cmdutil.openstorage(repo, 'debugdata', file_, opts)
    try:
        ui.write(store.revision(store.lookup(rev), raw=True))
    except KeyError:
        raise error.Abort(_('invalid revision identifier %s') % rev)
568
568
@command('debugdate',
        [('e', 'extended', None, _('try extended date formats'))],
        _('[-e] DATE [RANGE]'),
        norepo=True, optionalrepo=True)
def debugdate(ui, date, range=None, **opts):
    """parse and display a date"""
    # --extended also tries the less common date formats.
    if opts[r"extended"]:
        parsed = dateutil.parsedate(date, util.extendeddateformats)
    else:
        parsed = dateutil.parsedate(date)
    ui.write(("internal: %d %d\n") % parsed)
    ui.write(("standard: %s\n") % dateutil.datestr(parsed))
    if range:
        # A second argument is interpreted as a date range to match against.
        matcher = dateutil.matchdate(range)
        ui.write(("match: %s\n") % matcher(parsed[0]))
584
584
@command('debugdeltachain',
        cmdutil.debugrevlogopts + cmdutil.formatteropts,
        _('-c|-m|FILE'),
        optionalrepo=True)
def debugdeltachain(ui, repo, file_=None, **opts):
    """dump information about delta chains in a revlog

    Output can be templatized. Available template keywords are:

    :``rev``: revision number
    :``chainid``: delta chain identifier (numbered by unique base)
    :``chainlen``: delta chain length to this revision
    :``prevrev``: previous revision in delta chain
    :``deltatype``: role of delta / how it was computed
    :``compsize``: compressed size of revision
    :``uncompsize``: uncompressed size of revision
    :``chainsize``: total size of compressed revisions in chain
    :``chainratio``: total chain size divided by uncompressed revision size
                  (new delta chains typically start at ratio 2.00)
    :``lindist``: linear distance from base revision in delta chain to end
                  of this revision
    :``extradist``: total size of revisions not part of this delta chain from
                    base of delta chain to end of this revision; a measurement
                    of how much extra data we need to read/seek across to read
                    the delta chain for this revision
    :``extraratio``: extradist divided by chainsize; another representation of
                    how much unrelated data is needed to load this delta chain

    If the repository is configured to use the sparse read, additional keywords
    are available:

    :``readsize``: total size of data read from the disk for a revision
                   (sum of the sizes of all the blocks)
    :``largestblock``: size of the largest block of data read from the disk
    :``readdensity``: density of useful bytes in the data read from the disk
    :``srchunks``:  in how many data hunks the whole revision would be read

    The sparse read can be enabled with experimental.sparse-read = True
    """
    opts = pycompat.byteskwargs(opts)
    rl = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts)
    # Bind frequently used revlog accessors to locals.
    index = rl.index
    start = rl.start
    length = rl.length
    generaldelta = rl.version & revlog.FLAG_GENERALDELTA
    withsparseread = getattr(rl, '_withsparseread', False)

    def revinfo(rev):
        # Return (compsize, uncompsize, deltatype, chain, chainsize) for rev.
        entry = index[rev]
        compsize = entry[1]
        uncompsize = entry[2]

        if generaldelta:
            # entry[3] is the delta base; classify it relative to the
            # parents (entry[5]/entry[6]) and neighboring revisions.
            if entry[3] == entry[5]:
                deltatype = 'p1'
            elif entry[3] == entry[6]:
                deltatype = 'p2'
            elif entry[3] == rev - 1:
                deltatype = 'prev'
            elif entry[3] == rev:
                deltatype = 'base'
            else:
                deltatype = 'other'
        else:
            # Without generaldelta, deltas always go against the previous
            # revision unless this revision is a full snapshot.
            deltatype = 'base' if entry[3] == rev else 'prev'

        chain = rl._deltachain(rev)[0]
        chainsize = sum(index[chainrev][1] for chainrev in chain)

        return compsize, uncompsize, deltatype, chain, chainsize

    fm = ui.formatter('debugdeltachain', opts)

    fm.plain('    rev  chain# chainlen     prev   delta       '
             'size    rawsize  chainsize     ratio   lindist extradist '
             'extraratio')
    if withsparseread:
        fm.plain('   readsize largestblk rddensity srchunks')
    fm.plain('\n')

    # Chains are numbered consecutively by their unique base revision.
    chainbases = {}
    for rev in rl:
        comp, uncomp, deltatype, chain, chainsize = revinfo(rev)
        chainbase = chain[0]
        chainid = chainbases.setdefault(chainbase, len(chainbases) + 1)
        basestart = start(chainbase)
        revstart = start(rev)
        lineardist = revstart + comp - basestart
        extradist = lineardist - chainsize
        # The chain always ends in rev itself; the element before it, if
        # any, is the revision this delta was computed against.
        prevrev = chain[-2] if len(chain) > 1 else -1

        chainratio = (float(chainsize) / float(uncomp) if uncomp != 0
                      else chainsize)
        extraratio = (float(extradist) / float(chainsize) if chainsize != 0
                      else extradist)

        fm.startitem()
        fm.write('rev chainid chainlen prevrev deltatype compsize '
                 'uncompsize chainsize chainratio lindist extradist '
                 'extraratio',
                 '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f',
                 rev, chainid, len(chain), prevrev, deltatype, comp,
                 uncomp, chainsize, chainratio, lineardist, extradist,
                 extraratio,
                 rev=rev, chainid=chainid, chainlen=len(chain),
                 prevrev=prevrev, deltatype=deltatype, compsize=comp,
                 uncompsize=uncomp, chainsize=chainsize,
                 chainratio=chainratio, lindist=lineardist,
                 extradist=extradist, extraratio=extraratio)
        if withsparseread:
            # Simulate a sparse read of the chain and measure how much
            # data would actually be pulled off disk.
            readsize = 0
            largestblock = 0
            srchunks = 0

            for revschunk in deltautil.slicechunk(rl, chain):
                srchunks += 1
                blkend = start(revschunk[-1]) + length(revschunk[-1])
                blksize = blkend - start(revschunk[0])

                readsize += blksize
                largestblock = max(largestblock, blksize)

            readdensity = (float(chainsize) / float(readsize) if readsize
                           else 1)

            fm.write('readsize largestblock readdensity srchunks',
                     ' %10d %10d %9.5f %8d',
                     readsize, largestblock, readdensity, srchunks,
                     readsize=readsize, largestblock=largestblock,
                     readdensity=readdensity, srchunks=srchunks)

        fm.plain('\n')

    fm.end()
736
736
@command('debugdirstate|debugstate',
        [('', 'nodates', None, _('do not display the saved mtime (DEPRECATED)')),
         ('', 'dates', True, _('display the saved mtime')),
         ('', 'datesort', None, _('sort by saved mtime'))],
        _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = not opts[r'dates']
    if opts.get(r'nodates') is not None:
        # Honor the deprecated --nodates flag when it was given.
        nodates = True
    datesort = opts.get(r'datesort')

    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
    for path, entry in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if entry[3] == -1:
            timestr = 'unset               '
        elif nodates:
            timestr = 'set                 '
        else:
            timestr = time.strftime(r"%Y-%m-%d %H:%M:%S ",
                                    time.localtime(entry[3]))
            timestr = encoding.strtolocal(timestr)
        if entry[1] & 0o20000:
            # S_IFLNK bit: this entry is a symlink.
            mode = 'lnk'
        else:
            mode = '%3o' % (entry[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n"
                 % (entry[0], mode, entry[2], timestr, path))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
770
770
@command('debugdiscovery',
        [('', 'old', None, _('use old-style discovery')),
         ('', 'nonheads', None,
          _('use old-style discovery with non-heads included')),
         ('', 'rev', [], 'restrict discovery to this set of revs'),
         ('', 'seed', '12323', 'specify the random seed use for discovery'),
        ] + cmdutil.remoteopts,
        _('[--rev REV] [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
    """runs the changeset discovery protocol in isolation"""
    opts = pycompat.byteskwargs(opts)
    remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl))
    remote = hg.peer(repo, opts, remoteurl)
    ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))

    # make sure tests are repeatable
    random.seed(int(opts['seed']))

    if opts.get('old'):
        def rundiscovery(pushedrevs, remoteheads, remote=remote):
            # Legacy tree-walking discovery.
            if not util.safehasattr(remote, 'branches'):
                # enable in-client legacy support
                remote = localrepo.locallegacypeer(remote.local())
            common, _in, hds = treediscovery.findcommonincoming(repo, remote,
                                                                force=True)
            common = set(common)
            if not opts.get('nonheads'):
                ui.write(("unpruned common: %s\n") %
                         " ".join(sorted(short(n) for n in common)))

            # Reduce the common set to its heads, as node ids.
            clnode = repo.changelog.node
            common = repo.revs('heads(::%ln)', common)
            common = {clnode(r) for r in common}
            return common, hds
    else:
        def rundiscovery(pushedrevs, remoteheads, remote=remote):
            # Modern set-based discovery, optionally restricted to the
            # ancestors of --rev.
            nodes = None
            if pushedrevs:
                revs = scmutil.revrange(repo, pushedrevs)
                nodes = [repo[r].node() for r in revs]
            common, _anyinc, hds = setdiscovery.findcommonheads(
                ui, repo, remote, ancestorsof=nodes)
            return common, hds

    remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, revs=None)
    localrevs = opts['rev']
    with util.timedcm('debug-discovery') as t:
        common, hds = rundiscovery(localrevs, remoterevs)

    # compute all statistics
    common = set(common)
    rheads = set(hds)
    lheads = set(repo.heads())

    data = {}
    data['elapsed'] = t.elapsed
    data['nb-common'] = len(common)
    data['nb-common-local'] = len(common & lheads)
    data['nb-common-remote'] = len(common & rheads)
    data['nb-common-both'] = len(common & rheads & lheads)
    data['nb-local'] = len(lheads)
    data['nb-local-missing'] = data['nb-local'] - data['nb-common-local']
    data['nb-remote'] = len(rheads)
    data['nb-remote-unknown'] = data['nb-remote'] - data['nb-common-remote']
    data['nb-revs'] = len(repo.revs('all()'))
    data['nb-revs-common'] = len(repo.revs('::%ln', common))
    data['nb-revs-missing'] = data['nb-revs'] - data['nb-revs-common']

    # display discovery summary
    ui.write(("elapsed time:  %(elapsed)f seconds\n") % data)
    ui.write(("heads summary:\n"))
    ui.write(("  total common heads:  %(nb-common)9d\n") % data)
    ui.write(("    also local heads:  %(nb-common-local)9d\n") % data)
    ui.write(("    also remote heads: %(nb-common-remote)9d\n") % data)
    ui.write(("    both:              %(nb-common-both)9d\n") % data)
    ui.write(("  local heads:         %(nb-local)9d\n") % data)
    ui.write(("    common:            %(nb-common-local)9d\n") % data)
    ui.write(("    missing:           %(nb-local-missing)9d\n") % data)
    ui.write(("  remote heads:        %(nb-remote)9d\n") % data)
    ui.write(("    common:            %(nb-common-remote)9d\n") % data)
    ui.write(("    unknown:           %(nb-remote-unknown)9d\n") % data)
    ui.write(("local changesets:      %(nb-revs)9d\n") % data)
    ui.write(("  common:              %(nb-revs-common)9d\n") % data)
    ui.write(("  missing:             %(nb-revs-missing)9d\n") % data)

    if ui.verbose:
        ui.write(("common heads: %s\n") %
                 " ".join(sorted(short(n) for n in common)))
861
861
862 _chunksize = 4 << 10
862 _chunksize = 4 << 10
863
863
@command('debugdownload',
    [
        ('o', 'output', '', _('path')),
    ],
    optionalrepo=True)
def debugdownload(ui, repo, url, output=None, **opts):
    """download a resource using Mercurial logic and config

    The URL is opened through Mercurial's url handling (so proxy/auth
    configuration applies).  Without --output the content is written to
    the ui; with --output it is streamed to that file in _chunksize
    pieces.
    """
    fh = urlmod.open(ui, url, output)

    dest = ui
    if output:
        dest = open(output, "wb", _chunksize)
    try:
        data = fh.read(_chunksize)
        while data:
            dest.write(data)
            data = fh.read(_chunksize)
    finally:
        # always release the source handle (previously leaked), then the
        # destination file if we opened one
        fh.close()
        if output:
            dest.close()
885
885
@command('debugextensions', cmdutil.formatteropts, [], optionalrepo=True)
def debugextensions(ui, repo, **opts):
    '''show information about active extensions'''
    opts = pycompat.byteskwargs(opts)
    hgver = util.version()
    fm = ui.formatter('debugextensions', opts)

    loaded = extensions.extensions(ui)
    for extname, extmod in sorted(loaded, key=operator.itemgetter(0)):
        isinternal = extensions.ismoduleinternal(extmod)
        extsource = pycompat.fsencode(extmod.__file__)
        # never expose the internal magic "tested with" string to users
        exttestedwith = ([] if isinternal
                         else getattr(extmod, 'testedwith', '').split())
        extbuglink = getattr(extmod, 'buglink', None)

        fm.startitem()

        if ui.quiet or ui.verbose:
            fm.write('name', '%s\n', extname)
        else:
            fm.write('name', '%s', extname)
            if isinternal or hgver in exttestedwith:
                fm.plain('\n')
            elif exttestedwith:
                # flag the most recent version it was tested against
                fm.plain(' (%s!)\n' % exttestedwith[-1])
            else:
                fm.plain(_(' (untested!)\n'))

        fm.condwrite(ui.verbose and extsource, 'source',
                 _('  location: %s\n'), extsource or "")

        if ui.verbose:
            fm.plain(_('  bundled: %s\n') % ['no', 'yes'][isinternal])
        fm.data(bundled=isinternal)

        fm.condwrite(ui.verbose and exttestedwith, 'testedwith',
                     _('  tested with: %s\n'),
                     fm.formatlist(exttestedwith, name='ver'))

        fm.condwrite(ui.verbose and extbuglink, 'buglink',
                     _('  bug reporting: %s\n'), extbuglink or "")

    fm.end()
931
931
@command('debugfileset',
    [('r', 'rev', '', _('apply the filespec on this revision'), _('REV')),
     ('', 'all-files', False,
      _('test files from all revisions and working directory')),
     ('s', 'show-matcher', None,
      _('print internal representation of matcher')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME'))],
    _('[-r REV] [--all-files] [OPTION]... FILESPEC'))
def debugfileset(ui, repo, expr, **opts):
    '''parse and apply a fileset specification'''
    from . import fileset
    fileset.symbols # force import of fileset so we have predicates to optimize
    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # the pipeline the expression tree goes through, in order
    stages = [
        ('parsed', pycompat.identity),
        ('analyzed', filesetlang.analyze),
        ('optimized', filesetlang.optimize),
    ]
    stagenames = {stagename for stagename, _func in stages}

    showalways = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for stagename in opts['show_stage']:
            if stagename not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % stagename)
        showalways.update(opts['show_stage'])

    tree = filesetlang.parse(expr)
    for stagename, transform in stages:
        tree = transform(tree)
        if stagename in showalways:
            if opts['show_stage'] or stagename != 'parsed':
                ui.write(("* %s:\n") % stagename)
            ui.write(filesetlang.prettyformat(tree), "\n")

    # collect the candidate file names the matcher will be tested against
    files = set()
    if opts['all_files']:
        for rev in repo:
            rctx = repo[rev]
            files.update(rctx.files())
            files.update(rctx.substate)
    if opts['all_files'] or ctx.rev() is None:
        wctx = repo[None]
        files.update(repo.dirstate.walk(scmutil.matchall(repo),
                                        subrepos=list(wctx.substate),
                                        unknown=True, ignored=True))
        files.update(wctx.substate)
    else:
        files.update(ctx.files())
        files.update(ctx.substate)

    m = ctx.matchfileset(expr)
    if opts['show_matcher'] or (opts['show_matcher'] is None and ui.verbose):
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    for f in sorted(files):
        if m(f):
            ui.write("%s\n" % f)
998
998
@command('debugformat',
         [] + cmdutil.formatteropts)
def debugformat(ui, repo, **opts):
    """display format information about the current repository

    Use --verbose to get extra information about current config value and
    Mercurial default."""
    opts = pycompat.byteskwargs(opts)
    namewidth = max(len(fv.name) for fv in upgrade.allformatvariant)
    namewidth = max(len('format-variant'), namewidth)

    def makeformatname(name):
        # pad every variant name so the value columns line up
        return '%s:' + (' ' * (namewidth - len(name)))

    fm = ui.formatter('debugformat', opts)
    if fm.isplain():
        def formatvalue(value):
            # strings pass through; anything else renders as yes/no
            if util.safehasattr(value, 'startswith'):
                return value
            return 'yes' if value else 'no'
    else:
        formatvalue = pycompat.identity

    # header row
    fm.plain('format-variant')
    fm.plain(' ' * (namewidth - len('format-variant')))
    fm.plain(' repo')
    if ui.verbose:
        fm.plain(' config default')
    fm.plain('\n')
    for fv in upgrade.allformatvariant:
        fm.startitem()
        repovalue = fv.fromrepo(repo)
        configvalue = fv.fromconfig(repo)

        # config mismatch takes priority over default mismatch
        if repovalue != configvalue:
            state = 'mismatchconfig'
        elif repovalue != fv.default:
            state = 'mismatchdefault'
        else:
            state = 'uptodate'
        namelabel = 'formatvariant.name.' + state
        repolabel = 'formatvariant.repo.' + state

        fm.write('name', makeformatname(fv.name), fv.name,
                 label=namelabel)
        fm.write('repo', ' %3s', formatvalue(repovalue),
                 label=repolabel)
        if fv.default != configvalue:
            configlabel = 'formatvariant.config.special'
        else:
            configlabel = 'formatvariant.config.default'
        fm.condwrite(ui.verbose, 'config', ' %6s', formatvalue(configvalue),
                     label=configlabel)
        fm.condwrite(ui.verbose, 'default', ' %7s', formatvalue(fv.default),
                     label='formatvariant.default')
        fm.plain('\n')
    fm.end()
1060
1060
@command('debugfsinfo', [], _('[PATH]'), norepo=True)
def debugfsinfo(ui, path="."):
    """show information detected about current filesystem"""
    ui.write(('path: %s\n') % path)
    ui.write(('mounted on: %s\n') % (util.getfsmountpoint(path) or '(unknown)'))
    ui.write(('exec: %s\n') % ('yes' if util.checkexec(path) else 'no'))
    ui.write(('fstype: %s\n') % (util.getfstype(path) or '(unknown)'))
    ui.write(('symlink: %s\n') % ('yes' if util.checklink(path) else 'no'))
    ui.write(('hardlink: %s\n') % ('yes' if util.checknlink(path) else 'no'))
    # probe case sensitivity with a throwaway dotfile in the target
    # directory; an unwritable path leaves the answer unknown
    casesensitive = '(unknown)'
    try:
        with pycompat.namedtempfile(prefix='.debugfsinfo', dir=path) as f:
            casesensitive = 'yes' if util.fscasesensitive(f.name) else 'no'
    except OSError:
        pass
    ui.write(('case-sensitive: %s\n') % casesensitive)
1077
1077
@command('debuggetbundle',
    [('H', 'head', [], _('id of head node'), _('ID')),
     ('C', 'common', [], _('id of common node'), _('ID')),
     ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))],
    _('REPO FILE [-H|-C ID]...'),
    norepo=True)
def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
    """retrieves a bundle from a repo

    Every ID must be a full-length hex node id string. Saves the bundle to the
    given file.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('getbundle'):
        raise error.Abort("getbundle() not supported by target repository")
    args = {}
    if common:
        args[r'common'] = [bin(s) for s in common]
    if head:
        args[r'heads'] = [bin(s) for s in head]
    # TODO: get desired bundlecaps from command line.
    args[r'bundlecaps'] = None
    bundle = peer.getbundle('debug', **args)

    # map the user-facing compression name to an on-disk bundle type
    btypes = {'none': 'HG10UN',
              'bzip2': 'HG10BZ',
              'gzip': 'HG10GZ',
              'bundle2': 'HG20'}
    bundletype = btypes.get(opts.get('type', 'bzip2').lower())
    if bundletype not in bundle2.bundletypes:
        raise error.Abort(_('unknown bundle type specified with --type'))
    bundle2.writebundle(ui, bundle, bundlepath, bundletype)
1112
1112
@command('debugignore', [], '[FILE]')
def debugignore(ui, repo, *files, **opts):
    """display the combined ignore pattern and information about ignored files

    With no argument display the combined ignore pattern.

    Given space separated file names, shows if the given file is ignored and
    if so, show the ignore rule (file and line number) that matched it.
    """
    ignore = repo.dirstate._ignore
    if not files:
        # no arguments: dump the combined pattern itself
        ui.write("%s\n" % pycompat.byterepr(ignore))
        return

    m = scmutil.match(repo[None], pats=files)
    uipathfn = scmutil.getuipathfn(repo, legacyrelativevalue=True)
    for f in m.files():
        nf = util.normpath(f)
        ignored = None
        ignoredata = None
        if nf != '.':
            if ignore(nf):
                ignored = nf
                ignoredata = repo.dirstate._ignorefileandline(nf)
            else:
                # the file itself is not ignored; maybe one of its
                # enclosing directories is
                for p in util.finddirs(nf):
                    if ignore(p):
                        ignored = p
                        ignoredata = repo.dirstate._ignorefileandline(p)
                        break
        if not ignored:
            ui.write(_("%s is not ignored\n") % uipathfn(f))
            continue
        if ignored == nf:
            ui.write(_("%s is ignored\n") % uipathfn(f))
        else:
            ui.write(_("%s is ignored because of "
                       "containing directory %s\n")
                     % (uipathfn(f), ignored))
        ignorefile, lineno, line = ignoredata
        ui.write(_("(ignore rule in %s, line %d: '%s')\n")
                 % (ignorefile, lineno, line))
1155
1155
@command('debugindex', cmdutil.debugrevlogopts + cmdutil.formatteropts,
         _('-c|-m|FILE'))
def debugindex(ui, repo, file_=None, **opts):
    """dump index data for a storage primitive"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindex', file_, opts)

    shortfn = hex if ui.debugflag else short

    # width of the node-id columns: measure the first entry, default to 12
    idlen = 12
    for firstrev in store:
        idlen = len(shortfn(store.node(firstrev)))
        break

    fm = ui.formatter('debugindex', opts)
    fm.plain(b'   rev linkrev %s %s p2\n' % (
        b'nodeid'.ljust(idlen), b'p1'.ljust(idlen)))

    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)

        fm.startitem()
        fm.write(b'rev', b'%6d ', rev)
        fm.write(b'linkrev', '%7d ', store.linkrev(rev))
        fm.write(b'node', '%s ', shortfn(node))
        fm.write(b'p1', '%s ', shortfn(p1))
        fm.write(b'p2', '%s', shortfn(p2))
        fm.plain(b'\n')

    fm.end()
1191
1191
@command('debugindexdot', cmdutil.debugrevlogopts,
         _('-c|-m|FILE'), optionalrepo=True)
def debugindexdot(ui, repo, file_=None, **opts):
    """dump an index DAG as a graphviz dot file"""
    opts = pycompat.byteskwargs(opts)
    store = cmdutil.openstorage(repo, 'debugindexdot', file_, opts)
    ui.write(("digraph G {\n"))
    for rev in store:
        node = store.node(rev)
        p1, p2 = store.parents(node)
        # one edge per real parent; a null second parent is omitted
        ui.write("\t%d -> %d\n" % (store.rev(p1), rev))
        if p2 != nullid:
            ui.write("\t%d -> %d\n" % (store.rev(p2), rev))
    ui.write("}\n")
1206
1206
@command('debugindexstats', [])
def debugindexstats(ui, repo):
    """show stats related to the changelog index"""
    # exercise the index via shortest() before reading its stats
    repo.changelog.shortest(nullid, 1)
    index = repo.changelog.index
    if not util.safehasattr(index, 'stats'):
        raise error.Abort(_('debugindexstats only works with native code'))
    stats = index.stats()
    for key in sorted(stats):
        ui.write('%s: %d\n' % (key, stats[key]))
1216
1216
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1217 @command('debuginstall', [] + cmdutil.formatteropts, '', norepo=True)
1218 def debuginstall(ui, **opts):
1218 def debuginstall(ui, **opts):
1219 '''test Mercurial installation
1219 '''test Mercurial installation
1220
1220
1221 Returns 0 on success.
1221 Returns 0 on success.
1222 '''
1222 '''
1223 opts = pycompat.byteskwargs(opts)
1223 opts = pycompat.byteskwargs(opts)
1224
1224
1225 problems = 0
1225 problems = 0
1226
1226
1227 fm = ui.formatter('debuginstall', opts)
1227 fm = ui.formatter('debuginstall', opts)
1228 fm.startitem()
1228 fm.startitem()
1229
1229
1230 # encoding
1230 # encoding
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1231 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1232 err = None
1232 err = None
1233 try:
1233 try:
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1234 codecs.lookup(pycompat.sysstr(encoding.encoding))
1235 except LookupError as inst:
1235 except LookupError as inst:
1236 err = stringutil.forcebytestr(inst)
1236 err = stringutil.forcebytestr(inst)
1237 problems += 1
1237 problems += 1
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1238 fm.condwrite(err, 'encodingerror', _(" %s\n"
1239 " (check that your locale is properly set)\n"), err)
1239 " (check that your locale is properly set)\n"), err)
1240
1240
1241 # Python
1241 # Python
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1242 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1243 pycompat.sysexecutable)
1243 pycompat.sysexecutable or _("unknown"))
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1244 fm.write('pythonver', _("checking Python version (%s)\n"),
1245 ("%d.%d.%d" % sys.version_info[:3]))
1245 ("%d.%d.%d" % sys.version_info[:3]))
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1246 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1247 os.path.dirname(pycompat.fsencode(os.__file__)))
1248
1248
1249 security = set(sslutil.supportedprotocols)
1249 security = set(sslutil.supportedprotocols)
1250 if sslutil.hassni:
1250 if sslutil.hassni:
1251 security.add('sni')
1251 security.add('sni')
1252
1252
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1253 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1254 fm.formatlist(sorted(security), name='protocol',
1254 fm.formatlist(sorted(security), name='protocol',
1255 fmt='%s', sep=','))
1255 fmt='%s', sep=','))
1256
1256
1257 # These are warnings, not errors. So don't increment problem count. This
1257 # These are warnings, not errors. So don't increment problem count. This
1258 # may change in the future.
1258 # may change in the future.
1259 if 'tls1.2' not in security:
1259 if 'tls1.2' not in security:
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1260 fm.plain(_(' TLS 1.2 not supported by Python install; '
1261 'network connections lack modern security\n'))
1261 'network connections lack modern security\n'))
1262 if 'sni' not in security:
1262 if 'sni' not in security:
1263 fm.plain(_(' SNI not supported by Python install; may have '
1263 fm.plain(_(' SNI not supported by Python install; may have '
1264 'connectivity issues with some servers\n'))
1264 'connectivity issues with some servers\n'))
1265
1265
1266 # TODO print CA cert info
1266 # TODO print CA cert info
1267
1267
1268 # hg version
1268 # hg version
1269 hgver = util.version()
1269 hgver = util.version()
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1270 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1271 hgver.split('+')[0])
1271 hgver.split('+')[0])
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1272 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1273 '+'.join(hgver.split('+')[1:]))
1273 '+'.join(hgver.split('+')[1:]))
1274
1274
1275 # compiled modules
1275 # compiled modules
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1276 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1277 policy.policy)
1277 policy.policy)
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1278 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1279 os.path.dirname(pycompat.fsencode(__file__)))
1279 os.path.dirname(pycompat.fsencode(__file__)))
1280
1280
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1281 rustandc = policy.policy in ('rust+c', 'rust+c-allow')
1282 rustext = rustandc # for now, that's the only case
1282 rustext = rustandc # for now, that's the only case
1283 cext = policy.policy in ('c', 'allow') or rustandc
1283 cext = policy.policy in ('c', 'allow') or rustandc
1284 nopure = cext or rustext
1284 nopure = cext or rustext
1285 if nopure:
1285 if nopure:
1286 err = None
1286 err = None
1287 try:
1287 try:
1288 if cext:
1288 if cext:
1289 from .cext import (
1289 from .cext import (
1290 base85,
1290 base85,
1291 bdiff,
1291 bdiff,
1292 mpatch,
1292 mpatch,
1293 osutil,
1293 osutil,
1294 )
1294 )
1295 # quiet pyflakes
1295 # quiet pyflakes
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1296 dir(bdiff), dir(mpatch), dir(base85), dir(osutil)
1297 if rustext:
1297 if rustext:
1298 from .rustext import (
1298 from .rustext import (
1299 ancestor,
1299 ancestor,
1300 dirstate,
1300 dirstate,
1301 )
1301 )
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1302 dir(ancestor), dir(dirstate) # quiet pyflakes
1303 except Exception as inst:
1303 except Exception as inst:
1304 err = stringutil.forcebytestr(inst)
1304 err = stringutil.forcebytestr(inst)
1305 problems += 1
1305 problems += 1
1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1306 fm.condwrite(err, 'extensionserror', " %s\n", err)
1307
1307
1308 compengines = util.compengines._engines.values()
1308 compengines = util.compengines._engines.values()
1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1309 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1310 fm.formatlist(sorted(e.name() for e in compengines),
1310 fm.formatlist(sorted(e.name() for e in compengines),
1311 name='compengine', fmt='%s', sep=', '))
1311 name='compengine', fmt='%s', sep=', '))
1312 fm.write('compenginesavail', _('checking available compression engines '
1312 fm.write('compenginesavail', _('checking available compression engines '
1313 '(%s)\n'),
1313 '(%s)\n'),
1314 fm.formatlist(sorted(e.name() for e in compengines
1314 fm.formatlist(sorted(e.name() for e in compengines
1315 if e.available()),
1315 if e.available()),
1316 name='compengine', fmt='%s', sep=', '))
1316 name='compengine', fmt='%s', sep=', '))
1317 wirecompengines = compression.compengines.supportedwireengines(
1317 wirecompengines = compression.compengines.supportedwireengines(
1318 compression.SERVERROLE)
1318 compression.SERVERROLE)
1319 fm.write('compenginesserver', _('checking available compression engines '
1319 fm.write('compenginesserver', _('checking available compression engines '
1320 'for wire protocol (%s)\n'),
1320 'for wire protocol (%s)\n'),
1321 fm.formatlist([e.name() for e in wirecompengines
1321 fm.formatlist([e.name() for e in wirecompengines
1322 if e.wireprotosupport()],
1322 if e.wireprotosupport()],
1323 name='compengine', fmt='%s', sep=', '))
1323 name='compengine', fmt='%s', sep=', '))
1324 re2 = 'missing'
1324 re2 = 'missing'
1325 if util._re2:
1325 if util._re2:
1326 re2 = 'available'
1326 re2 = 'available'
1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1327 fm.plain(_('checking "re2" regexp engine (%s)\n') % re2)
1328 fm.data(re2=bool(util._re2))
1328 fm.data(re2=bool(util._re2))
1329
1329
1330 # templates
1330 # templates
1331 p = templater.templatepaths()
1331 p = templater.templatepaths()
1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1332 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1333 fm.condwrite(not p, '', _(" no template directories found\n"))
1334 if p:
1334 if p:
1335 m = templater.templatepath("map-cmdline.default")
1335 m = templater.templatepath("map-cmdline.default")
1336 if m:
1336 if m:
1337 # template found, check if it is working
1337 # template found, check if it is working
1338 err = None
1338 err = None
1339 try:
1339 try:
1340 templater.templater.frommapfile(m)
1340 templater.templater.frommapfile(m)
1341 except Exception as inst:
1341 except Exception as inst:
1342 err = stringutil.forcebytestr(inst)
1342 err = stringutil.forcebytestr(inst)
1343 p = None
1343 p = None
1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1344 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1345 else:
1345 else:
1346 p = None
1346 p = None
1347 fm.condwrite(p, 'defaulttemplate',
1347 fm.condwrite(p, 'defaulttemplate',
1348 _("checking default template (%s)\n"), m)
1348 _("checking default template (%s)\n"), m)
1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1349 fm.condwrite(not m, 'defaulttemplatenotfound',
1350 _(" template '%s' not found\n"), "default")
1350 _(" template '%s' not found\n"), "default")
1351 if not p:
1351 if not p:
1352 problems += 1
1352 problems += 1
1353 fm.condwrite(not p, '',
1353 fm.condwrite(not p, '',
1354 _(" (templates seem to have been installed incorrectly)\n"))
1354 _(" (templates seem to have been installed incorrectly)\n"))
1355
1355
1356 # editor
1356 # editor
1357 editor = ui.geteditor()
1357 editor = ui.geteditor()
1358 editor = util.expandpath(editor)
1358 editor = util.expandpath(editor)
1359 editorbin = procutil.shellsplit(editor)[0]
1359 editorbin = procutil.shellsplit(editor)[0]
1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1360 fm.write('editor', _("checking commit editor... (%s)\n"), editorbin)
1361 cmdpath = procutil.findexe(editorbin)
1361 cmdpath = procutil.findexe(editorbin)
1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1362 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1363 _(" No commit editor set and can't find %s in PATH\n"
1363 _(" No commit editor set and can't find %s in PATH\n"
1364 " (specify a commit editor in your configuration"
1364 " (specify a commit editor in your configuration"
1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1365 " file)\n"), not cmdpath and editor == 'vi' and editorbin)
1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1366 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1367 _(" Can't find editor '%s' in PATH\n"
1367 _(" Can't find editor '%s' in PATH\n"
1368 " (specify a commit editor in your configuration"
1368 " (specify a commit editor in your configuration"
1369 " file)\n"), not cmdpath and editorbin)
1369 " file)\n"), not cmdpath and editorbin)
1370 if not cmdpath and editor != 'vi':
1370 if not cmdpath and editor != 'vi':
1371 problems += 1
1371 problems += 1
1372
1372
1373 # check username
1373 # check username
1374 username = None
1374 username = None
1375 err = None
1375 err = None
1376 try:
1376 try:
1377 username = ui.username()
1377 username = ui.username()
1378 except error.Abort as e:
1378 except error.Abort as e:
1379 err = stringutil.forcebytestr(e)
1379 err = stringutil.forcebytestr(e)
1380 problems += 1
1380 problems += 1
1381
1381
1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1382 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1383 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
1384 " (specify a username in your configuration file)\n"), err)
1384 " (specify a username in your configuration file)\n"), err)
1385
1385
1386 fm.condwrite(not problems, '',
1386 fm.condwrite(not problems, '',
1387 _("no problems detected\n"))
1387 _("no problems detected\n"))
1388 if not problems:
1388 if not problems:
1389 fm.data(problems=problems)
1389 fm.data(problems=problems)
1390 fm.condwrite(problems, 'problems',
1390 fm.condwrite(problems, 'problems',
1391 _("%d problems detected,"
1391 _("%d problems detected,"
1392 " please check your install!\n"), problems)
1392 " please check your install!\n"), problems)
1393 fm.end()
1393 fm.end()
1394
1394
1395 return problems
1395 return problems
1396
1396
@command('debugknown', [], _('REPO ID...'), norepo=True)
def debugknown(ui, repopath, *ids, **opts):
    """test whether node ids are known to a repo

    Every ID must be a full-length hex node id string. Returns a list of 0s
    and 1s indicating unknown/known.
    """
    opts = pycompat.byteskwargs(opts)
    peer = hg.peer(ui, opts, repopath)
    if not peer.capable('known'):
        raise error.Abort("known() not supported by target repository")
    # Query all ids in one round trip; the peer answers with booleans.
    answers = peer.known([bin(nodeid) for nodeid in ids])
    digits = ["1" if known else "0" for known in answers]
    ui.write("%s\n" % "".join(digits))
1410
1410
@command('debuglabelcomplete', [], _('LABEL...'))
def debuglabelcomplete(ui, repo, *args):
    '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
    # Pure alias: all completion logic lives in debugnamecomplete.
    debugnamecomplete(ui, repo, *args)
1415
1415
@command('debuglocks',
         [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
          ('W', 'force-wlock', None,
           _('free the working state lock (DANGEROUS)')),
          ('s', 'set-lock', None, _('set the store lock until stopped')),
          ('S', 'set-wlock', None,
           _('set the working state lock until stopped'))],
         _('[OPTION]...'))
def debuglocks(ui, repo, **opts):
    """show or modify state of locks

    By default, this command will show which locks are held. This
    includes the user and process holding the lock, the amount of time
    the lock has been held, and the machine name where the process is
    running if it's not local.

    Locks protect the integrity of Mercurial's data, so should be
    treated with care. System crashes or other interruptions may cause
    locks to not be properly released, though Mercurial will usually
    detect and remove such stale locks automatically.

    However, detecting stale locks may not always be possible (for
    instance, on a shared filesystem). Removing locks may also be
    blocked by filesystem permissions.

    Setting a lock will prevent other commands from changing the data.
    The command will wait until an interruption (SIGINT, SIGTERM, ...) occurs.
    The set locks are removed when the command exits.

    Returns 0 if no locks are held.

    """

    # Forced removal deletes the lock files directly, bypassing any
    # staleness checks -- hence the DANGEROUS warnings in the option help.
    if opts.get(r'force_lock'):
        repo.svfs.unlink('lock')
    if opts.get(r'force_wlock'):
        repo.vfs.unlink('wlock')
    if opts.get(r'force_lock') or opts.get(r'force_wlock'):
        return 0

    locks = []
    try:
        if opts.get(r'set_wlock'):
            try:
                # non-blocking acquire (False): fail immediately if held
                locks.append(repo.wlock(False))
            except error.LockHeld:
                raise error.Abort(_('wlock is already held'))
        if opts.get(r'set_lock'):
            try:
                locks.append(repo.lock(False))
            except error.LockHeld:
                raise error.Abort(_('lock is already held'))
        if len(locks):
            # Hold the lock(s) until the user answers the prompt (or the
            # process is interrupted); release happens in the finally below.
            ui.promptchoice(_("ready to release the lock (y)? $$ &Yes"))
            return 0
    finally:
        release(*locks)

    now = time.time()
    held = 0

    def report(vfs, name, method):
        # this causes stale locks to get reaped for more accurate reporting
        try:
            l = method(False)
        except error.LockHeld:
            l = None

        if l:
            # We acquired it, so nobody else holds it: report it free.
            l.release()
        else:
            try:
                st = vfs.lstat(name)
                age = now - st[stat.ST_MTIME]
                user = util.username(st.st_uid)
                locker = vfs.readlock(name)
                if ":" in locker:
                    # lock contents are "host:pid"; only show the host when
                    # the lock was taken on a different machine
                    host, pid = locker.split(':')
                    if host == socket.gethostname():
                        locker = 'user %s, process %s' % (user or b'None', pid)
                    else:
                        locker = ('user %s, process %s, host %s'
                                  % (user or b'None', pid, host))
                ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
                return 1
            except OSError as e:
                # ENOENT means the lock file vanished between the failed
                # acquire and the stat: treat it as free
                if e.errno != errno.ENOENT:
                    raise

        ui.write(("%-6s free\n") % (name + ":"))
        return 0

    held += report(repo.svfs, "lock", repo.lock)
    held += report(repo.vfs, "wlock", repo.wlock)

    # Exit status equals the number of locks held (0 = no locks).
    return held
1512
1512
@command('debugmanifestfulltextcache', [
        ('', 'clear', False, _('clear the cache')),
        ('a', 'add', [], _('add the given manifest nodes to the cache'),
         _('NODE'))
    ], '')
def debugmanifestfulltextcache(ui, repo, add=(), **opts):
    """show, clear or amend the contents of the manifest fulltext cache"""

    def getcache():
        # The fulltext cache is an implementation detail of the revlog-based
        # manifest storage; other storage backends may not expose one.
        r = repo.manifestlog.getstorage(b'')
        try:
            return r._fulltextcache
        except AttributeError:
            msg = _("Current revlog implementation doesn't appear to have a "
                    "manifest fulltext cache\n")
            raise error.Abort(msg)

    if opts.get(r'clear'):
        with repo.wlock():
            cache = getcache()
            cache.clear(clear_persisted_data=True)
        return

    if add:
        with repo.wlock():
            m = repo.manifestlog
            store = m.getstorage(b'')
            for n in add:
                try:
                    manifest = m[store.lookup(n)]
                except error.LookupError as e:
                    raise error.Abort(e, hint="Check your manifest node id")
                manifest.read()  # reading also stores the revision in cache
        return

    # No --clear and no --add: display the cache contents.
    cache = getcache()
    if not len(cache):
        ui.write(_('cache empty\n'))
    else:
        ui.write(
            _('cache contains %d manifest entries, in order of most to '
              'least recent:\n') % (len(cache),))
        totalsize = 0
        for nodeid in cache:
            # Use cache.peek to not update the LRU order
            data = cache.peek(nodeid)
            size = len(data)
            totalsize += size + 24   # 20 bytes nodeid, 4 bytes size
            ui.write(_('id: %s, size %s\n') % (
                hex(nodeid), util.bytecount(size)))
        ondisk = cache._opener.stat('manifestfulltextcache').st_size
        ui.write(
            _('total cache data size %s, on-disk %s\n') % (
                util.bytecount(totalsize), util.bytecount(ondisk))
        )
1568
1568
@command('debugmergestate', [], '')
def debugmergestate(ui, repo, *args):
    """print merge state

    Use --verbose to print out information about whether v1 or v2 merge state
    was chosen."""
    def _hashornull(h):
        # render the all-zeros hash as the literal string 'null'
        if h == nullhex:
            return 'null'
        else:
            return h

    def printrecords(version):
        # Dump every record of the requested merge-state format version,
        # pretty-printing the record types this command knows about.
        ui.write(('* version %d records\n') % version)
        if version == 1:
            records = v1records
        else:
            records = v2records

        for rtype, record in records:
            # pretty print some record types
            if rtype == 'L':
                ui.write(('local: %s\n') % record)
            elif rtype == 'O':
                ui.write(('other: %s\n') % record)
            elif rtype == 'm':
                # merge-driver record: "<driver>\0<state>"
                driver, mdstate = record.split('\0', 1)
                ui.write(('merge driver: %s (state "%s")\n')
                         % (driver, mdstate))
            elif rtype in 'FDC':
                # per-file record: NUL-separated fields; v2 adds the
                # "other" node between the first seven fields and flags
                r = record.split('\0')
                f, state, hash, lfile, afile, anode, ofile = r[0:7]
                if version == 1:
                    onode = 'not stored in v1 format'
                    flags = r[7]
                else:
                    onode, flags = r[7:9]
                ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
                         % (f, rtype, state, _hashornull(hash)))
                ui.write(('  local path: %s (flags "%s")\n') % (lfile, flags))
                ui.write(('  ancestor path: %s (node %s)\n')
                         % (afile, _hashornull(anode)))
                ui.write(('  other path: %s (node %s)\n')
                         % (ofile, _hashornull(onode)))
            elif rtype == 'f':
                # file-extras record: filename followed by alternating
                # key/value pairs, all NUL-separated
                filename, rawextras = record.split('\0', 1)
                extras = rawextras.split('\0')
                i = 0
                extrastrings = []
                while i < len(extras):
                    extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
                    i += 2

                ui.write(('file extras: %s (%s)\n')
                         % (filename, ', '.join(extrastrings)))
            elif rtype == 'l':
                # labels record: local, other, and an optional base label
                labels = record.split('\0', 2)
                labels = [l for l in labels if len(l) > 0]
                ui.write(('labels:\n'))
                ui.write(('  local: %s\n' % labels[0]))
                ui.write(('  other: %s\n' % labels[1]))
                if len(labels) > 2:
                    ui.write(('  base: %s\n' % labels[2]))
            else:
                # unknown record type: dump it raw with NULs made visible
                ui.write(('unrecognized entry: %s\t%s\n')
                         % (rtype, record.replace('\0', '\t')))

    # Avoid mergestate.read() since it may raise an exception for unsupported
    # merge state records. We shouldn't be doing this, but this is OK since this
    # command is pretty low-level.
    ms = mergemod.mergestate(repo)

    # sort so that reasonable information is on top
    v1records = ms._readrecordsv1()
    v2records = ms._readrecordsv2()
    order = 'LOml'
    def key(r):
        # records whose type appears in 'order' sort first, in that order;
        # everything else sorts after, by record payload
        idx = order.find(r[0])
        if idx == -1:
            return (1, r[1])
        else:
            return (0, idx)
    v1records.sort(key=key)
    v2records.sort(key=key)

    if not v1records and not v2records:
        ui.write(('no merge state found\n'))
    elif not v2records:
        ui.note(('no version 2 merge state\n'))
        printrecords(1)
    elif ms._v1v2match(v1records, v2records):
        ui.note(('v1 and v2 states match: using v2\n'))
        printrecords(2)
    else:
        ui.note(('v1 and v2 states mismatch: using v1\n'))
        printrecords(1)
        if ui.verbose:
            printrecords(2)
1667
1667
@command('debugnamecomplete', [], _('NAME...'))
def debugnamecomplete(ui, repo, *args):
    '''complete "names" - tags, open branch names, bookmark names'''

    candidates = set()
    # the 'branches' namespace is handled separately below so that only
    # open branches are offered, matching this command's historical output
    for nsname, ns in repo.names.iteritems():
        if nsname != 'branches':
            candidates.update(ns.listnames(repo))
    for tag, heads, tip, closed in repo.branchmap().iterbranches():
        if not closed:
            candidates.add(tag)

    # with no arguments, complete against the empty prefix (i.e. everything)
    prefixes = args or ['']
    matches = set()
    for prefix in prefixes:
        matches.update(name for name in candidates if name.startswith(prefix))
    ui.write('\n'.join(sorted(matches)))
    ui.write('\n')
1687
1687
@command('debugobsolete',
         [('', 'flags', 0, _('markers flag')),
          ('', 'record-parents', False,
           _('record parent information for the precursor')),
          ('r', 'rev', [], _('display markers relevant to REV')),
          ('', 'exclusive', False, _('restrict display to markers only '
                                     'relevant to REV')),
          ('', 'index', False, _('display index of the marker')),
          ('', 'delete', [], _('delete markers specified by indices')),
         ] + cmdutil.commitopts2 + cmdutil.formatteropts,
         _('[OBSOLETED [REPLACEMENT ...]]'))
def debugobsolete(ui, repo, precursor=None, *successors, **opts):
    """create arbitrary obsolete marker

    With no arguments, displays the list of obsolescence markers."""

    opts = pycompat.byteskwargs(opts)

    def parsenodeid(s):
        # Parse a full hex node id into its binary form, aborting on
        # anything that is not exactly the right length.
        try:
            # We do not use revsingle/revrange functions here to accept
            # arbitrary node identifiers, possibly not present in the
            # local repository.
            n = bin(s)
            if len(n) != len(nullid):
                raise TypeError()
            return n
        except TypeError:
            raise error.Abort('changeset references must be full hexadecimal '
                              'node identifiers')

    # --delete mode: remove the markers at the given obsstore indices.
    if opts.get('delete'):
        indices = []
        for v in opts.get('delete'):
            try:
                indices.append(int(v))
            except ValueError:
                raise error.Abort(_('invalid index value: %r') % v,
                                  hint=_('use integers for indices'))

        if repo.currenttransaction():
            raise error.Abort(_('cannot delete obsmarkers in the middle '
                                'of transaction.'))

        with repo.lock():
            n = repair.deleteobsmarkers(repo.obsstore, indices)
            ui.write(_('deleted %i obsolescence markers\n') % n)

        return

    if precursor is not None:
        # creation mode: record a marker "precursor -> successors"
        if opts['rev']:
            raise error.Abort('cannot select revision when creating marker')
        metadata = {}
        metadata['user'] = encoding.fromlocal(opts['user'] or ui.username())
        succs = tuple(parsenodeid(succ) for succ in successors)
        l = repo.lock()
        try:
            tr = repo.transaction('debugobsolete')
            try:
                date = opts.get('date')
                if date:
                    date = dateutil.parsedate(date)
                else:
                    date = None
                prec = parsenodeid(precursor)
                parents = None
                if opts['record_parents']:
                    # parents can only be recorded for changesets we have
                    if prec not in repo.unfiltered():
                        raise error.Abort('cannot used --record-parents on '
                                          'unknown changesets')
                    parents = repo.unfiltered()[prec].parents()
                    parents = tuple(p.node() for p in parents)
                repo.obsstore.create(tr, prec, succs, opts['flags'],
                                     parents=parents, date=date,
                                     metadata=metadata, ui=ui)
                tr.close()
            except ValueError as exc:
                raise error.Abort(_('bad obsmarker input: %s') %
                                  pycompat.bytestr(exc))
            finally:
                tr.release()
        finally:
            l.release()
    else:
        # display mode: list markers, optionally filtered by --rev
        if opts['rev']:
            revs = scmutil.revrange(repo, opts['rev'])
            nodes = [repo[r].node() for r in revs]
            markers = list(obsutil.getmarkers(repo, nodes=nodes,
                                              exclusive=opts['exclusive']))
            markers.sort(key=lambda x: x._data)
        else:
            markers = obsutil.getmarkers(repo)

        markerstoiter = markers
        isrelevant = lambda m: True
        if opts.get('rev') and opts.get('index'):
            markerstoiter = obsutil.getmarkers(repo)
            markerset = set(markers)
            isrelevant = lambda m: m in markerset

        fm = ui.formatter('debugobsolete', opts)
        for i, m in enumerate(markerstoiter):
            if not isrelevant(m):
                # marker can be irrelevant when we're iterating over a set
                # of markers (markerstoiter) which is bigger than the set
                # of markers we want to display (markers)
                # this can happen if both --index and --rev options are
                # provided and thus we need to iterate over all of the markers
                # to get the correct indices, but only display the ones that
                # are relevant to --rev value
                continue
            fm.startitem()
            ind = i if opts.get('index') else None
            cmdutil.showmarker(fm, m, index=ind)
        fm.end()
1804
1804
@command('debugp1copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp1copies(ui, repo, **opts):
    """dump copy information compared to p1"""

    opts = pycompat.byteskwargs(opts)
    # Default to the working directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # One "source -> destination" line per recorded copy against p1.
    for dst, src in ctx.p1copies().items():
        ui.write('%s -> %s\n' % (src, dst))
1815
1815
@command('debugp2copies',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV]'))
def debugp2copies(ui, repo, **opts):
    """dump copy information compared to p2"""

    # NOTE: this function was previously misnamed ``debugp1copies``, which
    # shadowed the p1 variant at module level; renamed to match the command.
    opts = pycompat.byteskwargs(opts)
    # Default to the working directory context when no revision is given.
    ctx = scmutil.revsingle(repo, opts.get('rev'), default=None)
    # One "source -> destination" line per recorded copy against p2.
    for dst, src in ctx.p2copies().items():
        ui.write('%s -> %s\n' % (src, dst))
1826
1826
@command('debugpathcomplete',
         [('f', 'full', None, _('complete an entire path')),
          ('n', 'normal', None, _('show only normal files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files'))],
         _('FILESPEC...'))
def debugpathcomplete(ui, repo, *specs, **opts):
    '''complete part or all of a tracked path

    This command supports shells that offer path name completion. It
    currently completes only files already known to the dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) completions for one spec, restricted to
        # dirstate entries whose status byte is in ``acceptable``.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(encoding.getcwd(), path))
        rootdir = repo.root + pycompat.ossep
        # Anything outside the repository cannot be completed.
        if spec != repo.root and not spec.startswith(rootdir):
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # The dirstate stores paths with '/' separators; translate the
        # spec on platforms with a different os separator.
        fixpaths = pycompat.ossep != '/'
        if fixpaths:
            spec = spec.replace(pycompat.ossep, '/')
        speclen = len(spec)
        fullpaths = opts[r'full']
        files, dirs = set(), set()
        # Bind the bound methods once; this loop runs per dirstate entry.
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', pycompat.ossep)
                if fullpaths:
                    addfile(f)
                    continue
                # Without --full, stop at the next path separator and
                # offer the directory prefix instead.
                s = f.find(pycompat.ossep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # Build the set of acceptable dirstate status codes from the flags;
    # an empty selection falls back to all of 'nmar' below.
    acceptable = ''
    if opts[r'normal']:
        acceptable += 'nm'
    if opts[r'added']:
        acceptable += 'a'
    if opts[r'removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')
1891
1891
@command('debugpathcopies',
         cmdutil.walkopts,
         'hg debugpathcopies REV1 REV2 [FILE]',
         inferrepo=True)
def debugpathcopies(ui, repo, rev1, rev2, *pats, **opts):
    """show copies between two revisions"""
    ctx1 = scmutil.revsingle(repo, rev1)
    ctx2 = scmutil.revsingle(repo, rev2)
    # The match is built against the first context; file patterns limit
    # which copies are reported.
    m = scmutil.match(ctx1, pats, opts)
    # Sorted by destination path for deterministic output.
    for dst, src in sorted(copies.pathcopies(ctx1, ctx2, m).items()):
        ui.write('%s -> %s\n' % (src, dst))
1903
1903
@command('debugpeer', [], _('PATH'), norepo=True)
def debugpeer(ui, path):
    """establish a connection to a peer repository"""
    # Always enable peer request logging. Requires --debug to display
    # though.
    overrides = {
        ('devel', 'debug.peer-request'): True,
    }

    with ui.configoverride(overrides):
        peer = hg.peer(ui, {}, path)

        # local() is non-None for peers backed by a local repository.
        local = peer.local() is not None
        canpush = peer.canpush()

        ui.write(_('url: %s\n') % peer.url())
        ui.write(_('local: %s\n') % (_('yes') if local else _('no')))
        ui.write(_('pushable: %s\n') % (_('yes') if canpush else _('no')))
1922
@command('debugpickmergetool',
         [('r', 'rev', '', _('check for files in this revision'), _('REV')),
          ('', 'changedelete', None, _('emulate merging change and delete')),
         ] + cmdutil.walkopts + cmdutil.mergetoolopts,
         _('[PATTERN]...'),
         inferrepo=True)
def debugpickmergetool(ui, repo, *pats, **opts):
    """examine which merge tool is chosen for specified file

    As described in :hg:`help merge-tools`, Mercurial examines
    configurations below in this order to decide which merge tool is
    chosen for specified file.

    1. ``--tool`` option
    2. ``HGMERGE`` environment variable
    3. configurations in ``merge-patterns`` section
    4. configuration of ``ui.merge``
    5. configurations in ``merge-tools`` section
    6. ``hgmerge`` tool (for historical reason only)
    7. default tool for fallback (``:merge`` or ``:prompt``)

    This command writes out examination result in the style below::

        FILE = MERGETOOL

    By default, all files known in the first parent context of the
    working directory are examined. Use file patterns and/or -I/-X
    options to limit target files. -r/--rev is also useful to examine
    files in another context without actual updating to it.

    With --debug, this command shows warning messages while matching
    against ``merge-patterns`` and so on, too. It is recommended to
    use this option with explicit file patterns and/or -I/-X options,
    because this option increases amount of output per file according
    to configurations in hgrc.

    With -v/--verbose, this command shows configurations below at
    first (only if specified).

    - ``--tool`` option
    - ``HGMERGE`` environment variable
    - configuration of ``ui.merge``

    If merge tool is chosen before matching against
    ``merge-patterns``, this command can't show any helpful
    information, even with --debug. In such case, information above is
    useful to know why a merge tool is chosen.
    """
    opts = pycompat.byteskwargs(opts)
    overrides = {}
    if opts['tool']:
        # --tool takes precedence over everything else; force it via config.
        overrides[('ui', 'forcemerge')] = opts['tool']
        ui.note(('with --tool %r\n') % (pycompat.bytestr(opts['tool'])))

    with ui.configoverride(overrides, 'debugmergepatterns'):
        # With -v, report the inputs that influence tool selection.
        hgmerge = encoding.environ.get("HGMERGE")
        if hgmerge is not None:
            ui.note(('with HGMERGE=%r\n') % (pycompat.bytestr(hgmerge)))
        uimerge = ui.config("ui", "merge")
        if uimerge:
            ui.note(('with ui.merge=%r\n') % (pycompat.bytestr(uimerge)))

        ctx = scmutil.revsingle(repo, opts.get('rev'))
        m = scmutil.match(ctx, pats, opts)
        changedelete = opts['changedelete']
        for path in ctx.walk(m):
            fctx = ctx[path]
            try:
                # Suppress _picktool's own chatter unless --debug is set,
                # but keep errors visible in the buffer.
                if not ui.debugflag:
                    ui.pushbuffer(error=True)
                tool, toolpath = filemerge._picktool(repo, ui, path,
                                                     fctx.isbinary(),
                                                     'l' in fctx.flags(),
                                                     changedelete)
            finally:
                if not ui.debugflag:
                    ui.popbuffer()
            ui.write(('%s = %s\n') % (path, tool))
2001
2001
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
    '''

    target = hg.peer(ui, {}, repopath)
    if keyinfo:
        # Five-arg form: conditionally update one key over the wire.
        key, old, new = keyinfo
        with target.commandexecutor() as e:
            r = e.callcommand('pushkey', {
                'namespace': namespace,
                'key': key,
                'old': old,
                'new': new,
            }).result()

        ui.status(pycompat.bytestr(r) + '\n')
        # pushkey returns truthy on success; invert for the exit code.
        return not r
    else:
        # Two-arg form: dump every key/value in the namespace, escaped so
        # binary-ish values stay printable.
        for k, v in sorted(target.listkeys(namespace).iteritems()):
            ui.write("%s\t%s\n" % (stringutil.escapestr(k),
                                   stringutil.escapestr(v)))
2029
2029
@command('debugpvec', [], _('A B'))
def debugpvec(ui, repo, a, b=None):
    """compare the pvec (parent vector) encodings of two revisions"""
    ca = scmutil.revsingle(repo, a)
    cb = scmutil.revsingle(repo, b)
    pa = pvec.ctxpvec(ca)
    pb = pvec.ctxpvec(cb)
    # NOTE(review): if none of these comparisons hold, ``rel`` would be
    # unbound below — presumably pvec comparisons are exhaustive; confirm.
    if pa == pb:
        rel = "="
    elif pa > pb:
        rel = ">"
    elif pa < pb:
        rel = "<"
    elif pa | pb:
        rel = "|"
    ui.write(_("a: %s\n") % pa)
    ui.write(_("b: %s\n") % pb)
    ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
    ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
             (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
              pa.distance(pb), rel))
2050
2050
@command('debugrebuilddirstate|debugrebuildstate',
         [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
          ('', 'minimal', None, _('only rebuild files that are inconsistent with '
                                  'the working copy parent')),
         ],
         _('[-r REV]'))
def debugrebuilddirstate(ui, repo, rev, **opts):
    """rebuild the dirstate as it would look like for the given revision

    If no revision is specified the first current parent will be used.

    The dirstate will be set to the files of the given revision.
    The actual working directory content or existing dirstate
    information such as adds or removes is not considered.

    ``minimal`` will only rebuild the dirstate status for files that claim to be
    tracked but are not in the parent manifest, or that exist in the parent
    manifest but are not in the dirstate. It will not change adds, removes, or
    modified files that are in the working copy parent.

    One use of this command is to make the next :hg:`status` invocation
    check the actual file content.
    """
    ctx = scmutil.revsingle(repo, rev)
    with repo.wlock():
        dirstate = repo.dirstate
        # changedfiles=None means "rebuild everything".
        changedfiles = None
        # See command doc for what minimal does.
        if opts.get(r'minimal'):
            manifestfiles = set(ctx.manifest().keys())
            dirstatefiles = set(dirstate)
            # Files only on one side of manifest/dirstate are inconsistent;
            # dirstate-only files marked added ('a') are intentional and kept.
            manifestonly = manifestfiles - dirstatefiles
            dsonly = dirstatefiles - manifestfiles
            dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
            changedfiles = manifestonly | dsnotadded

        dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2088
2088
@command('debugrebuildfncache', [], '')
def debugrebuildfncache(ui, repo):
    """rebuild the fncache file"""
    # Delegates entirely to repair; no options are supported.
    repair.rebuildfncache(ui, repo)
2093
2093
@command('debugrename',
         [('r', 'rev', '', _('revision to debug'), _('REV'))],
         _('[-r REV] [FILE]...'))
def debugrename(ui, repo, *pats, **opts):
    """dump rename information"""

    opts = pycompat.byteskwargs(opts)
    ctx = scmutil.revsingle(repo, opts.get('rev'))
    m = scmutil.match(ctx, pats, opts)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        # renamed() returns (source path, source filenode) or falsy when the
        # file was not copied/renamed at this filelog entry.
        o = fctx.filelog().renamed(fctx.filenode())
        rel = repo.pathto(abs)
        if o:
            ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
        else:
            ui.write(_("%s not renamed\n") % rel)
2112 @command('debugrevlog', cmdutil.debugrevlogopts +
2112 @command('debugrevlog', cmdutil.debugrevlogopts +
2113 [('d', 'dump', False, _('dump index data'))],
2113 [('d', 'dump', False, _('dump index data'))],
2114 _('-c|-m|FILE'),
2114 _('-c|-m|FILE'),
2115 optionalrepo=True)
2115 optionalrepo=True)
2116 def debugrevlog(ui, repo, file_=None, **opts):
2116 def debugrevlog(ui, repo, file_=None, **opts):
2117 """show data and statistics about a revlog"""
2117 """show data and statistics about a revlog"""
2118 opts = pycompat.byteskwargs(opts)
2118 opts = pycompat.byteskwargs(opts)
2119 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2119 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2120
2120
2121 if opts.get("dump"):
2121 if opts.get("dump"):
2122 numrevs = len(r)
2122 numrevs = len(r)
2123 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2123 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2124 " rawsize totalsize compression heads chainlen\n"))
2124 " rawsize totalsize compression heads chainlen\n"))
2125 ts = 0
2125 ts = 0
2126 heads = set()
2126 heads = set()
2127
2127
2128 for rev in pycompat.xrange(numrevs):
2128 for rev in pycompat.xrange(numrevs):
2129 dbase = r.deltaparent(rev)
2129 dbase = r.deltaparent(rev)
2130 if dbase == -1:
2130 if dbase == -1:
2131 dbase = rev
2131 dbase = rev
2132 cbase = r.chainbase(rev)
2132 cbase = r.chainbase(rev)
2133 clen = r.chainlen(rev)
2133 clen = r.chainlen(rev)
2134 p1, p2 = r.parentrevs(rev)
2134 p1, p2 = r.parentrevs(rev)
2135 rs = r.rawsize(rev)
2135 rs = r.rawsize(rev)
2136 ts = ts + rs
2136 ts = ts + rs
2137 heads -= set(r.parentrevs(rev))
2137 heads -= set(r.parentrevs(rev))
2138 heads.add(rev)
2138 heads.add(rev)
2139 try:
2139 try:
2140 compression = ts / r.end(rev)
2140 compression = ts / r.end(rev)
2141 except ZeroDivisionError:
2141 except ZeroDivisionError:
2142 compression = 0
2142 compression = 0
2143 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2143 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2144 "%11d %5d %8d\n" %
2144 "%11d %5d %8d\n" %
2145 (rev, p1, p2, r.start(rev), r.end(rev),
2145 (rev, p1, p2, r.start(rev), r.end(rev),
2146 r.start(dbase), r.start(cbase),
2146 r.start(dbase), r.start(cbase),
2147 r.start(p1), r.start(p2),
2147 r.start(p1), r.start(p2),
2148 rs, ts, compression, len(heads), clen))
2148 rs, ts, compression, len(heads), clen))
2149 return 0
2149 return 0
2150
2150
2151 v = r.version
2151 v = r.version
2152 format = v & 0xFFFF
2152 format = v & 0xFFFF
2153 flags = []
2153 flags = []
2154 gdelta = False
2154 gdelta = False
2155 if v & revlog.FLAG_INLINE_DATA:
2155 if v & revlog.FLAG_INLINE_DATA:
2156 flags.append('inline')
2156 flags.append('inline')
2157 if v & revlog.FLAG_GENERALDELTA:
2157 if v & revlog.FLAG_GENERALDELTA:
2158 gdelta = True
2158 gdelta = True
2159 flags.append('generaldelta')
2159 flags.append('generaldelta')
2160 if not flags:
2160 if not flags:
2161 flags = ['(none)']
2161 flags = ['(none)']
2162
2162
2163 ### tracks merge vs single parent
2163 ### tracks merge vs single parent
2164 nummerges = 0
2164 nummerges = 0
2165
2165
2166 ### tracks ways the "delta" are build
2166 ### tracks ways the "delta" are build
2167 # nodelta
2167 # nodelta
2168 numempty = 0
2168 numempty = 0
2169 numemptytext = 0
2169 numemptytext = 0
2170 numemptydelta = 0
2170 numemptydelta = 0
2171 # full file content
2171 # full file content
2172 numfull = 0
2172 numfull = 0
2173 # intermediate snapshot against a prior snapshot
2173 # intermediate snapshot against a prior snapshot
2174 numsemi = 0
2174 numsemi = 0
2175 # snapshot count per depth
2175 # snapshot count per depth
2176 numsnapdepth = collections.defaultdict(lambda: 0)
2176 numsnapdepth = collections.defaultdict(lambda: 0)
2177 # delta against previous revision
2177 # delta against previous revision
2178 numprev = 0
2178 numprev = 0
2179 # delta against first or second parent (not prev)
2179 # delta against first or second parent (not prev)
2180 nump1 = 0
2180 nump1 = 0
2181 nump2 = 0
2181 nump2 = 0
2182 # delta against neither prev nor parents
2182 # delta against neither prev nor parents
2183 numother = 0
2183 numother = 0
2184 # delta against prev that are also first or second parent
2184 # delta against prev that are also first or second parent
2185 # (details of `numprev`)
2185 # (details of `numprev`)
2186 nump1prev = 0
2186 nump1prev = 0
2187 nump2prev = 0
2187 nump2prev = 0
2188
2188
2189 # data about delta chain of each revs
2189 # data about delta chain of each revs
2190 chainlengths = []
2190 chainlengths = []
2191 chainbases = []
2191 chainbases = []
2192 chainspans = []
2192 chainspans = []
2193
2193
2194 # data about each revision
2194 # data about each revision
2195 datasize = [None, 0, 0]
2195 datasize = [None, 0, 0]
2196 fullsize = [None, 0, 0]
2196 fullsize = [None, 0, 0]
2197 semisize = [None, 0, 0]
2197 semisize = [None, 0, 0]
2198 # snapshot count per depth
2198 # snapshot count per depth
2199 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2199 snapsizedepth = collections.defaultdict(lambda: [None, 0, 0])
2200 deltasize = [None, 0, 0]
2200 deltasize = [None, 0, 0]
2201 chunktypecounts = {}
2201 chunktypecounts = {}
2202 chunktypesizes = {}
2202 chunktypesizes = {}
2203
2203
2204 def addsize(size, l):
2204 def addsize(size, l):
2205 if l[0] is None or size < l[0]:
2205 if l[0] is None or size < l[0]:
2206 l[0] = size
2206 l[0] = size
2207 if size > l[1]:
2207 if size > l[1]:
2208 l[1] = size
2208 l[1] = size
2209 l[2] += size
2209 l[2] += size
2210
2210
2211 numrevs = len(r)
2211 numrevs = len(r)
2212 for rev in pycompat.xrange(numrevs):
2212 for rev in pycompat.xrange(numrevs):
2213 p1, p2 = r.parentrevs(rev)
2213 p1, p2 = r.parentrevs(rev)
2214 delta = r.deltaparent(rev)
2214 delta = r.deltaparent(rev)
2215 if format > 0:
2215 if format > 0:
2216 addsize(r.rawsize(rev), datasize)
2216 addsize(r.rawsize(rev), datasize)
2217 if p2 != nullrev:
2217 if p2 != nullrev:
2218 nummerges += 1
2218 nummerges += 1
2219 size = r.length(rev)
2219 size = r.length(rev)
2220 if delta == nullrev:
2220 if delta == nullrev:
2221 chainlengths.append(0)
2221 chainlengths.append(0)
2222 chainbases.append(r.start(rev))
2222 chainbases.append(r.start(rev))
2223 chainspans.append(size)
2223 chainspans.append(size)
2224 if size == 0:
2224 if size == 0:
2225 numempty += 1
2225 numempty += 1
2226 numemptytext += 1
2226 numemptytext += 1
2227 else:
2227 else:
2228 numfull += 1
2228 numfull += 1
2229 numsnapdepth[0] += 1
2229 numsnapdepth[0] += 1
2230 addsize(size, fullsize)
2230 addsize(size, fullsize)
2231 addsize(size, snapsizedepth[0])
2231 addsize(size, snapsizedepth[0])
2232 else:
2232 else:
2233 chainlengths.append(chainlengths[delta] + 1)
2233 chainlengths.append(chainlengths[delta] + 1)
2234 baseaddr = chainbases[delta]
2234 baseaddr = chainbases[delta]
2235 revaddr = r.start(rev)
2235 revaddr = r.start(rev)
2236 chainbases.append(baseaddr)
2236 chainbases.append(baseaddr)
2237 chainspans.append((revaddr - baseaddr) + size)
2237 chainspans.append((revaddr - baseaddr) + size)
2238 if size == 0:
2238 if size == 0:
2239 numempty += 1
2239 numempty += 1
2240 numemptydelta += 1
2240 numemptydelta += 1
2241 elif r.issnapshot(rev):
2241 elif r.issnapshot(rev):
2242 addsize(size, semisize)
2242 addsize(size, semisize)
2243 numsemi += 1
2243 numsemi += 1
2244 depth = r.snapshotdepth(rev)
2244 depth = r.snapshotdepth(rev)
2245 numsnapdepth[depth] += 1
2245 numsnapdepth[depth] += 1
2246 addsize(size, snapsizedepth[depth])
2246 addsize(size, snapsizedepth[depth])
2247 else:
2247 else:
2248 addsize(size, deltasize)
2248 addsize(size, deltasize)
2249 if delta == rev - 1:
2249 if delta == rev - 1:
2250 numprev += 1
2250 numprev += 1
2251 if delta == p1:
2251 if delta == p1:
2252 nump1prev += 1
2252 nump1prev += 1
2253 elif delta == p2:
2253 elif delta == p2:
2254 nump2prev += 1
2254 nump2prev += 1
2255 elif delta == p1:
2255 elif delta == p1:
2256 nump1 += 1
2256 nump1 += 1
2257 elif delta == p2:
2257 elif delta == p2:
2258 nump2 += 1
2258 nump2 += 1
2259 elif delta != nullrev:
2259 elif delta != nullrev:
2260 numother += 1
2260 numother += 1
2261
2261
2262 # Obtain data on the raw chunks in the revlog.
2262 # Obtain data on the raw chunks in the revlog.
2263 if util.safehasattr(r, '_getsegmentforrevs'):
2263 if util.safehasattr(r, '_getsegmentforrevs'):
2264 segment = r._getsegmentforrevs(rev, rev)[1]
2264 segment = r._getsegmentforrevs(rev, rev)[1]
2265 else:
2265 else:
2266 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2266 segment = r._revlog._getsegmentforrevs(rev, rev)[1]
2267 if segment:
2267 if segment:
2268 chunktype = bytes(segment[0:1])
2268 chunktype = bytes(segment[0:1])
2269 else:
2269 else:
2270 chunktype = 'empty'
2270 chunktype = 'empty'
2271
2271
2272 if chunktype not in chunktypecounts:
2272 if chunktype not in chunktypecounts:
2273 chunktypecounts[chunktype] = 0
2273 chunktypecounts[chunktype] = 0
2274 chunktypesizes[chunktype] = 0
2274 chunktypesizes[chunktype] = 0
2275
2275
2276 chunktypecounts[chunktype] += 1
2276 chunktypecounts[chunktype] += 1
2277 chunktypesizes[chunktype] += size
2277 chunktypesizes[chunktype] += size
2278
2278
2279 # Adjust size min value for empty cases
2279 # Adjust size min value for empty cases
2280 for size in (datasize, fullsize, semisize, deltasize):
2280 for size in (datasize, fullsize, semisize, deltasize):
2281 if size[0] is None:
2281 if size[0] is None:
2282 size[0] = 0
2282 size[0] = 0
2283
2283
2284 numdeltas = numrevs - numfull - numempty - numsemi
2284 numdeltas = numrevs - numfull - numempty - numsemi
2285 numoprev = numprev - nump1prev - nump2prev
2285 numoprev = numprev - nump1prev - nump2prev
2286 totalrawsize = datasize[2]
2286 totalrawsize = datasize[2]
2287 datasize[2] /= numrevs
2287 datasize[2] /= numrevs
2288 fulltotal = fullsize[2]
2288 fulltotal = fullsize[2]
2289 fullsize[2] /= numfull
2289 fullsize[2] /= numfull
2290 semitotal = semisize[2]
2290 semitotal = semisize[2]
2291 snaptotal = {}
2291 snaptotal = {}
2292 if numsemi > 0:
2292 if numsemi > 0:
2293 semisize[2] /= numsemi
2293 semisize[2] /= numsemi
2294 for depth in snapsizedepth:
2294 for depth in snapsizedepth:
2295 snaptotal[depth] = snapsizedepth[depth][2]
2295 snaptotal[depth] = snapsizedepth[depth][2]
2296 snapsizedepth[depth][2] /= numsnapdepth[depth]
2296 snapsizedepth[depth][2] /= numsnapdepth[depth]
2297
2297
2298 deltatotal = deltasize[2]
2298 deltatotal = deltasize[2]
2299 if numdeltas > 0:
2299 if numdeltas > 0:
2300 deltasize[2] /= numdeltas
2300 deltasize[2] /= numdeltas
2301 totalsize = fulltotal + semitotal + deltatotal
2301 totalsize = fulltotal + semitotal + deltatotal
2302 avgchainlen = sum(chainlengths) / numrevs
2302 avgchainlen = sum(chainlengths) / numrevs
2303 maxchainlen = max(chainlengths)
2303 maxchainlen = max(chainlengths)
2304 maxchainspan = max(chainspans)
2304 maxchainspan = max(chainspans)
2305 compratio = 1
2305 compratio = 1
2306 if totalsize:
2306 if totalsize:
2307 compratio = totalrawsize / totalsize
2307 compratio = totalrawsize / totalsize
2308
2308
2309 basedfmtstr = '%%%dd\n'
2309 basedfmtstr = '%%%dd\n'
2310 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2310 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2311
2311
2312 def dfmtstr(max):
2312 def dfmtstr(max):
2313 return basedfmtstr % len(str(max))
2313 return basedfmtstr % len(str(max))
2314 def pcfmtstr(max, padding=0):
2314 def pcfmtstr(max, padding=0):
2315 return basepcfmtstr % (len(str(max)), ' ' * padding)
2315 return basepcfmtstr % (len(str(max)), ' ' * padding)
2316
2316
2317 def pcfmt(value, total):
2317 def pcfmt(value, total):
2318 if total:
2318 if total:
2319 return (value, 100 * float(value) / total)
2319 return (value, 100 * float(value) / total)
2320 else:
2320 else:
2321 return value, 100.0
2321 return value, 100.0
2322
2322
2323 ui.write(('format : %d\n') % format)
2323 ui.write(('format : %d\n') % format)
2324 ui.write(('flags : %s\n') % ', '.join(flags))
2324 ui.write(('flags : %s\n') % ', '.join(flags))
2325
2325
2326 ui.write('\n')
2326 ui.write('\n')
2327 fmt = pcfmtstr(totalsize)
2327 fmt = pcfmtstr(totalsize)
2328 fmt2 = dfmtstr(totalsize)
2328 fmt2 = dfmtstr(totalsize)
2329 ui.write(('revisions : ') + fmt2 % numrevs)
2329 ui.write(('revisions : ') + fmt2 % numrevs)
2330 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2330 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2331 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2331 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2332 ui.write(('revisions : ') + fmt2 % numrevs)
2332 ui.write(('revisions : ') + fmt2 % numrevs)
2333 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2333 ui.write((' empty : ') + fmt % pcfmt(numempty, numrevs))
2334 ui.write((' text : ')
2334 ui.write((' text : ')
2335 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2335 + fmt % pcfmt(numemptytext, numemptytext + numemptydelta))
2336 ui.write((' delta : ')
2336 ui.write((' delta : ')
2337 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2337 + fmt % pcfmt(numemptydelta, numemptytext + numemptydelta))
2338 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2338 ui.write((' snapshot : ') + fmt % pcfmt(numfull + numsemi, numrevs))
2339 for depth in sorted(numsnapdepth):
2339 for depth in sorted(numsnapdepth):
2340 ui.write((' lvl-%-3d : ' % depth)
2340 ui.write((' lvl-%-3d : ' % depth)
2341 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2341 + fmt % pcfmt(numsnapdepth[depth], numrevs))
2342 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2342 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2343 ui.write(('revision size : ') + fmt2 % totalsize)
2343 ui.write(('revision size : ') + fmt2 % totalsize)
2344 ui.write((' snapshot : ')
2344 ui.write((' snapshot : ')
2345 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2345 + fmt % pcfmt(fulltotal + semitotal, totalsize))
2346 for depth in sorted(numsnapdepth):
2346 for depth in sorted(numsnapdepth):
2347 ui.write((' lvl-%-3d : ' % depth)
2347 ui.write((' lvl-%-3d : ' % depth)
2348 + fmt % pcfmt(snaptotal[depth], totalsize))
2348 + fmt % pcfmt(snaptotal[depth], totalsize))
2349 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2349 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2350
2350
2351 def fmtchunktype(chunktype):
2351 def fmtchunktype(chunktype):
2352 if chunktype == 'empty':
2352 if chunktype == 'empty':
2353 return ' %s : ' % chunktype
2353 return ' %s : ' % chunktype
2354 elif chunktype in pycompat.bytestr(string.ascii_letters):
2354 elif chunktype in pycompat.bytestr(string.ascii_letters):
2355 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2355 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2356 else:
2356 else:
2357 return ' 0x%s : ' % hex(chunktype)
2357 return ' 0x%s : ' % hex(chunktype)
2358
2358
2359 ui.write('\n')
2359 ui.write('\n')
2360 ui.write(('chunks : ') + fmt2 % numrevs)
2360 ui.write(('chunks : ') + fmt2 % numrevs)
2361 for chunktype in sorted(chunktypecounts):
2361 for chunktype in sorted(chunktypecounts):
2362 ui.write(fmtchunktype(chunktype))
2362 ui.write(fmtchunktype(chunktype))
2363 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2363 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2364 ui.write(('chunks size : ') + fmt2 % totalsize)
2364 ui.write(('chunks size : ') + fmt2 % totalsize)
2365 for chunktype in sorted(chunktypecounts):
2365 for chunktype in sorted(chunktypecounts):
2366 ui.write(fmtchunktype(chunktype))
2366 ui.write(fmtchunktype(chunktype))
2367 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2367 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2368
2368
2369 ui.write('\n')
2369 ui.write('\n')
2370 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2370 fmt = dfmtstr(max(avgchainlen, maxchainlen, maxchainspan, compratio))
2371 ui.write(('avg chain length : ') + fmt % avgchainlen)
2371 ui.write(('avg chain length : ') + fmt % avgchainlen)
2372 ui.write(('max chain length : ') + fmt % maxchainlen)
2372 ui.write(('max chain length : ') + fmt % maxchainlen)
2373 ui.write(('max chain reach : ') + fmt % maxchainspan)
2373 ui.write(('max chain reach : ') + fmt % maxchainspan)
2374 ui.write(('compression ratio : ') + fmt % compratio)
2374 ui.write(('compression ratio : ') + fmt % compratio)
2375
2375
2376 if format > 0:
2376 if format > 0:
2377 ui.write('\n')
2377 ui.write('\n')
2378 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2378 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2379 % tuple(datasize))
2379 % tuple(datasize))
2380 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2380 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2381 % tuple(fullsize))
2381 % tuple(fullsize))
2382 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2382 ui.write(('inter-snapshot size (min/max/avg) : %d / %d / %d\n')
2383 % tuple(semisize))
2383 % tuple(semisize))
2384 for depth in sorted(snapsizedepth):
2384 for depth in sorted(snapsizedepth):
2385 if depth == 0:
2385 if depth == 0:
2386 continue
2386 continue
2387 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2387 ui.write((' level-%-3d (min/max/avg) : %d / %d / %d\n')
2388 % ((depth,) + tuple(snapsizedepth[depth])))
2388 % ((depth,) + tuple(snapsizedepth[depth])))
2389 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2389 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2390 % tuple(deltasize))
2390 % tuple(deltasize))
2391
2391
2392 if numdeltas > 0:
2392 if numdeltas > 0:
2393 ui.write('\n')
2393 ui.write('\n')
2394 fmt = pcfmtstr(numdeltas)
2394 fmt = pcfmtstr(numdeltas)
2395 fmt2 = pcfmtstr(numdeltas, 4)
2395 fmt2 = pcfmtstr(numdeltas, 4)
2396 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2396 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2397 if numprev > 0:
2397 if numprev > 0:
2398 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2398 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2399 numprev))
2399 numprev))
2400 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2400 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2401 numprev))
2401 numprev))
2402 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2402 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2403 numprev))
2403 numprev))
2404 if gdelta:
2404 if gdelta:
2405 ui.write(('deltas against p1 : ')
2405 ui.write(('deltas against p1 : ')
2406 + fmt % pcfmt(nump1, numdeltas))
2406 + fmt % pcfmt(nump1, numdeltas))
2407 ui.write(('deltas against p2 : ')
2407 ui.write(('deltas against p2 : ')
2408 + fmt % pcfmt(nump2, numdeltas))
2408 + fmt % pcfmt(nump2, numdeltas))
2409 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2409 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2410 numdeltas))
2410 numdeltas))
2411
2411
@command('debugrevlogindex', cmdutil.debugrevlogopts +
    [('f', 'format', 0, _('revlog format'), _('FORMAT'))],
    _('[-f FORMAT] -c|-m|FILE'),
    optionalrepo=True)
def debugrevlogindex(ui, repo, file_=None, **opts):
    """dump the contents of a revlog index

    Prints one line per revision of the selected revlog (changelog,
    manifest, or a file revlog). ``--format 0`` (default) shows offsets
    and parents as node ids; ``--format 1`` shows flags, sizes and
    parents as revision numbers. Verbose mode adds extra columns.
    """
    opts = pycompat.byteskwargs(opts)
    r = cmdutil.openrevlog(repo, 'debugrevlogindex', file_, opts)
    format = opts.get('format', 0)
    if format not in (0, 1):
        raise error.Abort(_("unknown format %d") % format)

    # full hex node ids with --debug, short forms otherwise
    if ui.debugflag:
        shortfn = hex
    else:
        shortfn = short

    # There might not be anything in r, so have a sane default
    idlen = 12
    for i in r:
        idlen = len(shortfn(r.node(i)))
        break

    # column headers, matching the per-revision rows emitted below
    if format == 0:
        if ui.verbose:
            ui.write((" rev offset length linkrev"
                      " %s %s p2\n") % ("nodeid".ljust(idlen),
                                        "p1".ljust(idlen)))
        else:
            ui.write((" rev linkrev %s %s p2\n") % (
                "nodeid".ljust(idlen), "p1".ljust(idlen)))
    elif format == 1:
        if ui.verbose:
            ui.write((" rev flag offset length size link p1"
                      " p2 %s\n") % "nodeid".rjust(idlen))
        else:
            ui.write((" rev flag size link p1 p2 %s\n") %
                     "nodeid".rjust(idlen))

    for i in r:
        node = r.node(i)
        if format == 0:
            try:
                pp = r.parents(node)
            except Exception:
                # fall back to null parents if the index entry is broken
                pp = [nullid, nullid]
            if ui.verbose:
                ui.write("% 6d % 9d % 7d % 7d %s %s %s\n" % (
                    i, r.start(i), r.length(i), r.linkrev(i),
                    shortfn(node), shortfn(pp[0]), shortfn(pp[1])))
            else:
                ui.write("% 6d % 7d %s %s %s\n" % (
                    i, r.linkrev(i), shortfn(node), shortfn(pp[0]),
                    shortfn(pp[1])))
        elif format == 1:
            pr = r.parentrevs(i)
            if ui.verbose:
                ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.start(i), r.length(i), r.rawsize(i),
                    r.linkrev(i), pr[0], pr[1], shortfn(node)))
            else:
                ui.write("% 6d %04x % 8d % 6d % 6d % 6d %s\n" % (
                    i, r.flags(i), r.rawsize(i), r.linkrev(i), pr[0], pr[1],
                    shortfn(node)))
2476
2476
@command('debugrevspec',
    [('', 'optimize', None,
      _('print parsed tree after optimizing (DEPRECATED)')),
     ('', 'show-revs', True, _('print list of result revisions (default)')),
     ('s', 'show-set', None, _('print internal representation of result set')),
     ('p', 'show-stage', [],
      _('print parsed tree at the given stage'), _('NAME')),
     ('', 'no-optimized', False, _('evaluate tree without optimization')),
     ('', 'verify-optimized', False, _('verify optimized result')),
     ],
    ('REVSPEC'))
def debugrevspec(ui, repo, expr, **opts):
    """parse and apply a revision specification

    Use -p/--show-stage option to print the parsed tree at the given stages.
    Use -p all to print tree at every stage.

    Use --no-show-revs option with -s or -p to print only the set
    representation or the parsed tree respectively.

    Use --verify-optimized to compare the optimized result with the unoptimized
    one. Returns 1 if the optimized result differs.
    """
    opts = pycompat.byteskwargs(opts)
    aliases = ui.configitems('revsetalias')
    # the successive transformations applied to the parsed tree, in order;
    # each stage name can be selected with -p/--show-stage
    stages = [
        ('parsed', lambda tree: tree),
        ('expanded', lambda tree: revsetlang.expandaliases(tree, aliases,
                                                          ui.warn)),
        ('concatenated', revsetlang.foldconcat),
        ('analyzed', revsetlang.analyze),
        ('optimized', revsetlang.optimize),
    ]
    if opts['no_optimized']:
        stages = stages[:-1]
    if opts['verify_optimized'] and opts['no_optimized']:
        raise error.Abort(_('cannot use --verify-optimized with '
                            '--no-optimized'))
    stagenames = set(n for n, f in stages)

    showalways = set()
    showchanged = set()
    if ui.verbose and not opts['show_stage']:
        # show parsed tree by --verbose (deprecated)
        showalways.add('parsed')
        showchanged.update(['expanded', 'concatenated'])
    if opts['optimize']:
        showalways.add('optimized')
    if opts['show_stage'] and opts['optimize']:
        raise error.Abort(_('cannot use --optimize with --show-stage'))
    if opts['show_stage'] == ['all']:
        showalways.update(stagenames)
    else:
        for n in opts['show_stage']:
            if n not in stagenames:
                raise error.Abort(_('invalid stage name: %s') % n)
        showalways.update(opts['show_stage'])

    # run the pipeline, remembering the tree produced at each stage and
    # printing the requested (or changed, for --verbose) intermediate trees
    treebystage = {}
    printedtree = None
    tree = revsetlang.parse(expr, lookup=revset.lookupfn(repo))
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revsetlang.prettyformat(tree), "\n")
            printedtree = tree

    if opts['verify_optimized']:
        # evaluate both the analyzed and the optimized trees and compare
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
            ui.write(("* analyzed set:\n"), stringutil.prettyrepr(arevs), "\n")
            ui.write(("* optimized set:\n"), stringutil.prettyrepr(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        # print a unified-diff-style comparison of the two revision lists
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in (r'delete', r'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%d\n' % c, label='diff.deleted')
            if tag in (r'insert', r'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%d\n' % c, label='diff.inserted')
            if tag == r'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %d\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if opts['show_set'] or (opts['show_set'] is None and ui.verbose):
        ui.write(("* set:\n"), stringutil.prettyrepr(revs), "\n")
    if not opts['show_revs']:
        return
    for c in revs:
        ui.write("%d\n" % c)
2579
2579
@command('debugserve', [
    ('', 'sshstdio', False, _('run an SSH server bound to process handles')),
    ('', 'logiofd', '', _('file descriptor to log server I/O to')),
    ('', 'logiofile', '', _('file to log server I/O to')),
    ], '')
def debugserve(ui, repo, **opts):
    """run a server with advanced settings

    This command is similar to :hg:`serve`. It exists partially as a
    workaround to the fact that ``hg serve --stdio`` must have specific
    arguments for security reasons.
    """
    opts = pycompat.byteskwargs(opts)

    if not opts['sshstdio']:
        raise error.Abort(_('only --sshstdio is currently supported'))

    logfh = None

    # --logiofd and --logiofile are mutually exclusive log destinations
    if opts['logiofd'] and opts['logiofile']:
        raise error.Abort(_('cannot use both --logiofd and --logiofile'))

    if opts['logiofd']:
        # Line buffered because output is line based.
        try:
            logfh = os.fdopen(int(opts['logiofd']), r'ab', 1)
        except OSError as e:
            if e.errno != errno.ESPIPE:
                raise
            # can't seek a pipe, so `ab` mode fails on py3
            logfh = os.fdopen(int(opts['logiofd']), r'wb', 1)
    elif opts['logiofile']:
        logfh = open(opts['logiofile'], 'ab', 1)

    s = wireprotoserver.sshserver(ui, repo, logfh=logfh)
    s.serve_forever()
2616
2616
@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    node1 = scmutil.revsingle(repo, rev1).node()
    # the second parent defaults to the null revision when REV2 is omitted
    node2 = scmutil.revsingle(repo, rev2, 'null').node()

    # take the working-directory lock while rewriting the parent pointers
    with repo.wlock():
        repo.setparents(node1, node2)
2634
2634
@command('debugssl', [], '[SOURCE]', optionalrepo=True)
def debugssl(ui, repo, source=None, **opts):
    '''test a secure connection to a server

    This builds the certificate chain for the server on Windows, installing the
    missing intermediates and trusted root via Windows Update if necessary. It
    does nothing on other platforms.

    If SOURCE is omitted, the 'default' path will be used. If a URL is given,
    that server is used. See :hg:`help urls` for more information.

    If the update succeeds, retry the original operation. Otherwise, the cause
    of the SSL error is likely another issue.
    '''
    if not pycompat.iswindows:
        raise error.Abort(_('certificate chain building is only possible on '
                            'Windows'))

    if not source:
        if not repo:
            raise error.Abort(_("there is no Mercurial repository here, and no "
                                "server specified"))
        source = "default"

    source, branches = hg.parseurl(ui.expandpath(source))
    url = util.url(source)

    # resolve the address to probe; only https and ssh schemes are handled
    defaultport = {'https': 443, 'ssh': 22}
    if url.scheme in defaultport:
        try:
            addr = (url.host, int(url.port or defaultport[url.scheme]))
        except ValueError:
            raise error.Abort(_("malformed port number in URL"))
    else:
        raise error.Abort(_("only https and ssh connections are supported"))

    # imported lazily: win32 is only importable on Windows, which the
    # platform check above guarantees at this point
    from . import win32

    # deliberately unverified connection: we only want the peer's raw
    # certificate so Windows can (re)build its chain
    s = ssl.wrap_socket(socket.socket(), ssl_version=ssl.PROTOCOL_TLS,
                        cert_reqs=ssl.CERT_NONE, ca_certs=None)

    try:
        s.connect(addr)
        cert = s.getpeercert(True)

        ui.status(_('checking the certificate chain for %s\n') % url.host)

        # first pass: check only, without triggering Windows Update
        complete = win32.checkcertificatechain(cert, build=False)

        if not complete:
            ui.status(_('certificate chain is incomplete, updating... '))

            # second pass: allow Windows to fetch missing intermediates
            if not win32.checkcertificatechain(cert):
                ui.status(_('failed.\n'))
            else:
                ui.status(_('done.\n'))
        else:
            ui.status(_('full certificate chain is available\n'))
    finally:
        s.close()
2695
2695
@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    """dump the subrepository state of a revision

    For each entry in the revision's substate, prints the subrepo path
    followed by its source URL and pinned revision. Defaults to the
    working directory's parent when no revision is given.
    """
    ctx = scmutil.revsingle(repo, rev, None)
    # sort by path for deterministic output
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])
2706
2706
@command('debugsuccessorssets',
    [('', 'closest', False, _('return closest successors sets only'))],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs, **opts):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only unless closests
    successors set is set.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors are called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # Cache shared across successorssets() calls so repeated traversals of
    # the obsolescence graph are not recomputed per revision.
    cache = {}
    ctx2str = bytes
    node2str = short
    closest = opts[r'closest']
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsutil.successorssets(repo, ctx.node(),
                                               closest=closest,
                                               cache=cache):
            if succsset:
                # One indented line per successors set, nodes separated
                # by single spaces.
                ui.write('    ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')
2759
2759
@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    def showsymbols(t):
        # Dump the keywords and functions a compiled template uses.
        kwds, funcs = t.symbolsuseddefault()
        ui.write(("* keywords: %s\n") % ', '.join(sorted(kwds)))
        ui.write(("* functions: %s\n") % ', '.join(sorted(funcs)))

    revs = None
    if opts[r'rev']:
        # Revisions require a repository; optionalrepo=True means we may
        # have been invoked without one.
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts[r'rev'])

    # Collect -D KEY=VALUE definitions into template properties.
    props = {}
    for d in opts[r'define']:
        try:
            key, val = d.split('=', 1)
            key = key.strip()
            val = val.strip()
            # 'ui' is reserved as a template resource name.
            if not key or key == 'ui':
                raise ValueError
            props[key] = val
        except ValueError:
            # Covers both a missing '=' and an invalid key.
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    if revs is None:
        # Generic template: render once with the defined properties.
        tres = formatter.templateresources(ui, repo)
        t = formatter.maketemplater(ui, tmpl, resources=tres)
        if ui.verbose:
            showsymbols(t)
        ui.write(t.renderdefault(props))
    else:
        # Log template: render per changeset.
        displayer = logcmdutil.maketemplater(ui, repo, tmpl)
        if ui.verbose:
            showsymbols(displayer.t)
        for rev in revs:
            displayer.show(repo[rev], **pycompat.strkwargs(props))
        displayer.close()
2816
2816
@command('debuguigetpass', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguigetpass(ui, prompt=''):
    """show prompt to type password"""
    r = ui.getpass(prompt)
    # Fix: the label previously read 'respose' (typo); 'response' matches
    # the sibling debuguiprompt command's output.
    ui.write(('response: %s\n') % r)
2824
2824
@command('debuguiprompt', [
    ('p', 'prompt', '', _('prompt text'), _('TEXT')),
], _('[-p TEXT]'), norepo=True)
def debuguiprompt(ui, prompt=''):
    """show plain prompt"""
    # Echo the answer back so tests can verify prompt plumbing end to end.
    answer = ui.prompt(prompt)
    ui.write(('response: %s\n') % answer)
2832
2832
@command('debugupdatecaches', [])
def debugupdatecaches(ui, repo, *pats, **opts):
    """warm all known caches in the repository"""
    # Hold both the working-copy lock and the store lock while cache
    # files are regenerated.
    with repo.wlock():
        with repo.lock():
            repo.updatecaches(full=True)
2838
2838
@command('debugupgraderepo', [
    ('o', 'optimize', [], _('extra optimization to perform'), _('NAME')),
    ('', 'run', False, _('performs an upgrade')),
    ('', 'backup', True, _('keep the old repository content around')),
])
def debugupgraderepo(ui, repo, run=False, optimize=None, backup=True):
    """upgrade a repository to use different features

    If no arguments are specified, the repository is evaluated for upgrade
    and a list of problems and potential optimizations is printed.

    With ``--run``, a repository upgrade is performed. Behavior of the upgrade
    can be influenced via additional arguments. More details will be provided
    by the command output when run without ``--run``.

    During the upgrade, the repository will be locked and no writes will be
    allowed.

    At the end of the upgrade, the repository may not be readable while new
    repository data is swapped in. This window will be as long as it takes to
    rename some directories inside the ``.hg`` directory. On most machines, this
    should complete almost instantaneously and the chances of a consumer being
    unable to access the repository should be low.
    """
    # Thin command wrapper: all of the heavy lifting lives in the upgrade
    # module.
    return upgrade.upgraderepo(ui, repo, run=run, optimize=optimize,
                               backup=backup)
2865
2865
@command('debugwalk', cmdutil.walkopts, _('[OPTION]... [FILE]...'),
         inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    opts = pycompat.byteskwargs(opts)
    m = scmutil.match(repo[None], pats, opts)
    if ui.verbose:
        ui.write(('* matcher:\n'), stringutil.prettyrepr(m), '\n')
    items = list(repo[None].walk(m))
    if not items:
        return
    # Optionally display paths with '/' separators regardless of platform.
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        topath = lambda fn: util.normpath(fn)
    else:
        topath = lambda fn: fn
    # Column widths sized to the longest absolute and relative paths.
    fmt = 'f  %%-%ds  %%-%ds  %%s' % (
        max(len(path) for path in items),
        max(len(repo.pathto(path)) for path in items))
    for path in items:
        line = fmt % (path, topath(repo.pathto(path)),
                      'exact' if m.exact(path) else '')
        ui.write("%s\n" % line.rstrip())
2886
2886
@command('debugwhyunstable', [], _('REV'))
def debugwhyunstable(ui, repo, rev):
    """explain instabilities of a changeset"""
    ctx = scmutil.revsingle(repo, rev)
    for entry in obsutil.whyunstable(repo, ctx):
        divergent = entry.get('divergentnodes')
        if divergent:
            # Show each divergent node with its phase, space separated,
            # with a trailing space before the reason.
            parts = ['%s (%s)' % (c.hex(), c.phasestr()) for c in divergent]
            dnodes = ' '.join(parts) + ' '
        else:
            dnodes = ''
        ui.write('%s: %s%s %s\n' % (entry['instability'], dnodes,
                                    entry['reason'], entry['node']))
2897
2897
@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + cmdutil.remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    opts = pycompat.byteskwargs(opts)
    repo = hg.peer(ui, opts, repopath)
    # Strip connection-related options; only the test options remain.
    for opt in cmdutil.remoteopts:
        del opts[opt[1]]
    # Forward only the options that were actually set.
    args = {k: v for k, v in opts.iteritems() if v}
    args = pycompat.strkwargs(args)
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)
2921
2921
2922 def _parsewirelangblocks(fh):
2922 def _parsewirelangblocks(fh):
2923 activeaction = None
2923 activeaction = None
2924 blocklines = []
2924 blocklines = []
2925 lastindent = 0
2925 lastindent = 0
2926
2926
2927 for line in fh:
2927 for line in fh:
2928 line = line.rstrip()
2928 line = line.rstrip()
2929 if not line:
2929 if not line:
2930 continue
2930 continue
2931
2931
2932 if line.startswith(b'#'):
2932 if line.startswith(b'#'):
2933 continue
2933 continue
2934
2934
2935 if not line.startswith(b' '):
2935 if not line.startswith(b' '):
2936 # New block. Flush previous one.
2936 # New block. Flush previous one.
2937 if activeaction:
2937 if activeaction:
2938 yield activeaction, blocklines
2938 yield activeaction, blocklines
2939
2939
2940 activeaction = line
2940 activeaction = line
2941 blocklines = []
2941 blocklines = []
2942 lastindent = 0
2942 lastindent = 0
2943 continue
2943 continue
2944
2944
2945 # Else we start with an indent.
2945 # Else we start with an indent.
2946
2946
2947 if not activeaction:
2947 if not activeaction:
2948 raise error.Abort(_('indented line outside of block'))
2948 raise error.Abort(_('indented line outside of block'))
2949
2949
2950 indent = len(line) - len(line.lstrip())
2950 indent = len(line) - len(line.lstrip())
2951
2951
2952 # If this line is indented more than the last line, concatenate it.
2952 # If this line is indented more than the last line, concatenate it.
2953 if indent > lastindent and blocklines:
2953 if indent > lastindent and blocklines:
2954 blocklines[-1] += line.lstrip()
2954 blocklines[-1] += line.lstrip()
2955 else:
2955 else:
2956 blocklines.append(line)
2956 blocklines.append(line)
2957 lastindent = indent
2957 lastindent = indent
2958
2958
2959 # Flush last block.
2959 # Flush last block.
2960 if activeaction:
2960 if activeaction:
2961 yield activeaction, blocklines
2961 yield activeaction, blocklines
2962
2962
2963 @command('debugwireproto',
2963 @command('debugwireproto',
2964 [
2964 [
2965 ('', 'localssh', False, _('start an SSH server for this repo')),
2965 ('', 'localssh', False, _('start an SSH server for this repo')),
2966 ('', 'peer', '', _('construct a specific version of the peer')),
2966 ('', 'peer', '', _('construct a specific version of the peer')),
2967 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2967 ('', 'noreadstderr', False, _('do not read from stderr of the remote')),
2968 ('', 'nologhandshake', False,
2968 ('', 'nologhandshake', False,
2969 _('do not log I/O related to the peer handshake')),
2969 _('do not log I/O related to the peer handshake')),
2970 ] + cmdutil.remoteopts,
2970 ] + cmdutil.remoteopts,
2971 _('[PATH]'),
2971 _('[PATH]'),
2972 optionalrepo=True)
2972 optionalrepo=True)
2973 def debugwireproto(ui, repo, path=None, **opts):
2973 def debugwireproto(ui, repo, path=None, **opts):
2974 """send wire protocol commands to a server
2974 """send wire protocol commands to a server
2975
2975
2976 This command can be used to issue wire protocol commands to remote
2976 This command can be used to issue wire protocol commands to remote
2977 peers and to debug the raw data being exchanged.
2977 peers and to debug the raw data being exchanged.
2978
2978
2979 ``--localssh`` will start an SSH server against the current repository
2979 ``--localssh`` will start an SSH server against the current repository
2980 and connect to that. By default, the connection will perform a handshake
2980 and connect to that. By default, the connection will perform a handshake
2981 and establish an appropriate peer instance.
2981 and establish an appropriate peer instance.
2982
2982
2983 ``--peer`` can be used to bypass the handshake protocol and construct a
2983 ``--peer`` can be used to bypass the handshake protocol and construct a
2984 peer instance using the specified class type. Valid values are ``raw``,
2984 peer instance using the specified class type. Valid values are ``raw``,
2985 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2985 ``http2``, ``ssh1``, and ``ssh2``. ``raw`` instances only allow sending
2986 raw data payloads and don't support higher-level command actions.
2986 raw data payloads and don't support higher-level command actions.
2987
2987
2988 ``--noreadstderr`` can be used to disable automatic reading from stderr
2988 ``--noreadstderr`` can be used to disable automatic reading from stderr
2989 of the peer (for SSH connections only). Disabling automatic reading of
2989 of the peer (for SSH connections only). Disabling automatic reading of
2990 stderr is useful for making output more deterministic.
2990 stderr is useful for making output more deterministic.
2991
2991
2992 Commands are issued via a mini language which is specified via stdin.
2992 Commands are issued via a mini language which is specified via stdin.
2993 The language consists of individual actions to perform. An action is
2993 The language consists of individual actions to perform. An action is
2994 defined by a block. A block is defined as a line with no leading
2994 defined by a block. A block is defined as a line with no leading
2995 space followed by 0 or more lines with leading space. Blocks are
2995 space followed by 0 or more lines with leading space. Blocks are
2996 effectively a high-level command with additional metadata.
2996 effectively a high-level command with additional metadata.
2997
2997
2998 Lines beginning with ``#`` are ignored.
2998 Lines beginning with ``#`` are ignored.
2999
2999
3000 The following sections denote available actions.
3000 The following sections denote available actions.
3001
3001
3002 raw
3002 raw
3003 ---
3003 ---
3004
3004
3005 Send raw data to the server.
3005 Send raw data to the server.
3006
3006
3007 The block payload contains the raw data to send as one atomic send
3007 The block payload contains the raw data to send as one atomic send
3008 operation. The data may not actually be delivered in a single system
3008 operation. The data may not actually be delivered in a single system
3009 call: it depends on the abilities of the transport being used.
3009 call: it depends on the abilities of the transport being used.
3010
3010
3011 Each line in the block is de-indented and concatenated. Then, that
3011 Each line in the block is de-indented and concatenated. Then, that
3012 value is evaluated as a Python b'' literal. This allows the use of
3012 value is evaluated as a Python b'' literal. This allows the use of
3013 backslash escaping, etc.
3013 backslash escaping, etc.
3014
3014
3015 raw+
3015 raw+
3016 ----
3016 ----
3017
3017
3018 Behaves like ``raw`` except flushes output afterwards.
3018 Behaves like ``raw`` except flushes output afterwards.
3019
3019
3020 command <X>
3020 command <X>
3021 -----------
3021 -----------
3022
3022
3023 Send a request to run a named command, whose name follows the ``command``
3023 Send a request to run a named command, whose name follows the ``command``
3024 string.
3024 string.
3025
3025
3026 Arguments to the command are defined as lines in this block. The format of
3026 Arguments to the command are defined as lines in this block. The format of
3027 each line is ``<key> <value>``. e.g.::
3027 each line is ``<key> <value>``. e.g.::
3028
3028
3029 command listkeys
3029 command listkeys
3030 namespace bookmarks
3030 namespace bookmarks
3031
3031
3032 If the value begins with ``eval:``, it will be interpreted as a Python
3032 If the value begins with ``eval:``, it will be interpreted as a Python
3033 literal expression. Otherwise values are interpreted as Python b'' literals.
3033 literal expression. Otherwise values are interpreted as Python b'' literals.
3034 This allows sending complex types and encoding special byte sequences via
3034 This allows sending complex types and encoding special byte sequences via
3035 backslash escaping.
3035 backslash escaping.
3036
3036
3037 The following arguments have special meaning:
3037 The following arguments have special meaning:
3038
3038
3039 ``PUSHFILE``
3039 ``PUSHFILE``
3040 When defined, the *push* mechanism of the peer will be used instead
3040 When defined, the *push* mechanism of the peer will be used instead
3041 of the static request-response mechanism and the content of the
3041 of the static request-response mechanism and the content of the
3042 file specified in the value of this argument will be sent as the
3042 file specified in the value of this argument will be sent as the
3043 command payload.
3043 command payload.
3044
3044
3045 This can be used to submit a local bundle file to the remote.
3045 This can be used to submit a local bundle file to the remote.
3046
3046
3047 batchbegin
3047 batchbegin
3048 ----------
3048 ----------
3049
3049
3050 Instruct the peer to begin a batched send.
3050 Instruct the peer to begin a batched send.
3051
3051
3052 All ``command`` blocks are queued for execution until the next
3052 All ``command`` blocks are queued for execution until the next
3053 ``batchsubmit`` block.
3053 ``batchsubmit`` block.
3054
3054
3055 batchsubmit
3055 batchsubmit
3056 -----------
3056 -----------
3057
3057
3058 Submit previously queued ``command`` blocks as a batch request.
3058 Submit previously queued ``command`` blocks as a batch request.
3059
3059
3060 This action MUST be paired with a ``batchbegin`` action.
3060 This action MUST be paired with a ``batchbegin`` action.
3061
3061
3062 httprequest <method> <path>
3062 httprequest <method> <path>
3063 ---------------------------
3063 ---------------------------
3064
3064
3065 (HTTP peer only)
3065 (HTTP peer only)
3066
3066
3067 Send an HTTP request to the peer.
3067 Send an HTTP request to the peer.
3068
3068
3069 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3069 The HTTP request line follows the ``httprequest`` action. e.g. ``GET /foo``.
3070
3070
3071 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3071 Arguments of the form ``<key>: <value>`` are interpreted as HTTP request
3072 headers to add to the request. e.g. ``Accept: foo``.
3072 headers to add to the request. e.g. ``Accept: foo``.
3073
3073
3074 The following arguments are special:
3074 The following arguments are special:
3075
3075
3076 ``BODYFILE``
3076 ``BODYFILE``
3077 The content of the file defined as the value to this argument will be
3077 The content of the file defined as the value to this argument will be
3078 transferred verbatim as the HTTP request body.
3078 transferred verbatim as the HTTP request body.
3079
3079
3080 ``frame <type> <flags> <payload>``
3080 ``frame <type> <flags> <payload>``
3081 Send a unified protocol frame as part of the request body.
3081 Send a unified protocol frame as part of the request body.
3082
3082
3083 All frames will be collected and sent as the body to the HTTP
3083 All frames will be collected and sent as the body to the HTTP
3084 request.
3084 request.
3085
3085
3086 close
3086 close
3087 -----
3087 -----
3088
3088
3089 Close the connection to the server.
3089 Close the connection to the server.
3090
3090
3091 flush
3091 flush
3092 -----
3092 -----
3093
3093
3094 Flush data written to the server.
3094 Flush data written to the server.
3095
3095
3096 readavailable
3096 readavailable
3097 -------------
3097 -------------
3098
3098
3099 Close the write end of the connection and read all available data from
3099 Close the write end of the connection and read all available data from
3100 the server.
3100 the server.
3101
3101
3102 If the connection to the server encompasses multiple pipes, we poll both
3102 If the connection to the server encompasses multiple pipes, we poll both
3103 pipes and read available data.
3103 pipes and read available data.
3104
3104
3105 readline
3105 readline
3106 --------
3106 --------
3107
3107
3108 Read a line of output from the server. If there are multiple output
3108 Read a line of output from the server. If there are multiple output
3109 pipes, reads only the main pipe.
3109 pipes, reads only the main pipe.
3110
3110
3111 ereadline
3111 ereadline
3112 ---------
3112 ---------
3113
3113
3114 Like ``readline``, but read from the stderr pipe, if available.
3114 Like ``readline``, but read from the stderr pipe, if available.
3115
3115
3116 read <X>
3116 read <X>
3117 --------
3117 --------
3118
3118
3119 ``read()`` N bytes from the server's main output pipe.
3119 ``read()`` N bytes from the server's main output pipe.
3120
3120
3121 eread <X>
3121 eread <X>
3122 ---------
3122 ---------
3123
3123
3124 ``read()`` N bytes from the server's stderr pipe, if available.
3124 ``read()`` N bytes from the server's stderr pipe, if available.
3125
3125
3126 Specifying Unified Frame-Based Protocol Frames
3126 Specifying Unified Frame-Based Protocol Frames
3127 ----------------------------------------------
3127 ----------------------------------------------
3128
3128
3129 It is possible to emit a *Unified Frame-Based Protocol* by using special
3129 It is possible to emit a *Unified Frame-Based Protocol* by using special
3130 syntax.
3130 syntax.
3131
3131
3132 A frame is composed as a type, flags, and payload. These can be parsed
3132 A frame is composed as a type, flags, and payload. These can be parsed
3133 from a string of the form:
3133 from a string of the form:
3134
3134
3135 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3135 <request-id> <stream-id> <stream-flags> <type> <flags> <payload>
3136
3136
3137 ``request-id`` and ``stream-id`` are integers defining the request and
3137 ``request-id`` and ``stream-id`` are integers defining the request and
3138 stream identifiers.
3138 stream identifiers.
3139
3139
3140 ``type`` can be an integer value for the frame type or the string name
3140 ``type`` can be an integer value for the frame type or the string name
3141 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3141 of the type. The strings are defined in ``wireprotoframing.py``. e.g.
3142 ``command-name``.
3142 ``command-name``.
3143
3143
3144 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3144 ``stream-flags`` and ``flags`` are a ``|`` delimited list of flag
3145 components. Each component (and there can be just one) can be an integer
3145 components. Each component (and there can be just one) can be an integer
3146 or a flag name for stream flags or frame flags, respectively. Values are
3146 or a flag name for stream flags or frame flags, respectively. Values are
3147 resolved to integers and then bitwise OR'd together.
3147 resolved to integers and then bitwise OR'd together.
3148
3148
3149 ``payload`` represents the raw frame payload. If it begins with
3149 ``payload`` represents the raw frame payload. If it begins with
3150 ``cbor:``, the following string is evaluated as Python code and the
3150 ``cbor:``, the following string is evaluated as Python code and the
3151 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3151 resulting object is fed into a CBOR encoder. Otherwise it is interpreted
3152 as a Python byte string literal.
3152 as a Python byte string literal.
3153 """
3153 """
3154 opts = pycompat.byteskwargs(opts)
3154 opts = pycompat.byteskwargs(opts)
3155
3155
3156 if opts['localssh'] and not repo:
3156 if opts['localssh'] and not repo:
3157 raise error.Abort(_('--localssh requires a repository'))
3157 raise error.Abort(_('--localssh requires a repository'))
3158
3158
3159 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3159 if opts['peer'] and opts['peer'] not in ('raw', 'http2', 'ssh1', 'ssh2'):
3160 raise error.Abort(_('invalid value for --peer'),
3160 raise error.Abort(_('invalid value for --peer'),
3161 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3161 hint=_('valid values are "raw", "ssh1", and "ssh2"'))
3162
3162
3163 if path and opts['localssh']:
3163 if path and opts['localssh']:
3164 raise error.Abort(_('cannot specify --localssh with an explicit '
3164 raise error.Abort(_('cannot specify --localssh with an explicit '
3165 'path'))
3165 'path'))
3166
3166
3167 if ui.interactive():
3167 if ui.interactive():
3168 ui.write(_('(waiting for commands on stdin)\n'))
3168 ui.write(_('(waiting for commands on stdin)\n'))
3169
3169
3170 blocks = list(_parsewirelangblocks(ui.fin))
3170 blocks = list(_parsewirelangblocks(ui.fin))
3171
3171
3172 proc = None
3172 proc = None
3173 stdin = None
3173 stdin = None
3174 stdout = None
3174 stdout = None
3175 stderr = None
3175 stderr = None
3176 opener = None
3176 opener = None
3177
3177
3178 if opts['localssh']:
3178 if opts['localssh']:
3179 # We start the SSH server in its own process so there is process
3179 # We start the SSH server in its own process so there is process
3180 # separation. This prevents a whole class of potential bugs around
3180 # separation. This prevents a whole class of potential bugs around
3181 # shared state from interfering with server operation.
3181 # shared state from interfering with server operation.
3182 args = procutil.hgcmd() + [
3182 args = procutil.hgcmd() + [
3183 '-R', repo.root,
3183 '-R', repo.root,
3184 'debugserve', '--sshstdio',
3184 'debugserve', '--sshstdio',
3185 ]
3185 ]
3186 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3186 proc = subprocess.Popen(pycompat.rapply(procutil.tonativestr, args),
3187 stdin=subprocess.PIPE,
3187 stdin=subprocess.PIPE,
3188 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3188 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
3189 bufsize=0)
3189 bufsize=0)
3190
3190
3191 stdin = proc.stdin
3191 stdin = proc.stdin
3192 stdout = proc.stdout
3192 stdout = proc.stdout
3193 stderr = proc.stderr
3193 stderr = proc.stderr
3194
3194
3195 # We turn the pipes into observers so we can log I/O.
3195 # We turn the pipes into observers so we can log I/O.
3196 if ui.verbose or opts['peer'] == 'raw':
3196 if ui.verbose or opts['peer'] == 'raw':
3197 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3197 stdin = util.makeloggingfileobject(ui, proc.stdin, b'i',
3198 logdata=True)
3198 logdata=True)
3199 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3199 stdout = util.makeloggingfileobject(ui, proc.stdout, b'o',
3200 logdata=True)
3200 logdata=True)
3201 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3201 stderr = util.makeloggingfileobject(ui, proc.stderr, b'e',
3202 logdata=True)
3202 logdata=True)
3203
3203
3204 # --localssh also implies the peer connection settings.
3204 # --localssh also implies the peer connection settings.
3205
3205
3206 url = 'ssh://localserver'
3206 url = 'ssh://localserver'
3207 autoreadstderr = not opts['noreadstderr']
3207 autoreadstderr = not opts['noreadstderr']
3208
3208
3209 if opts['peer'] == 'ssh1':
3209 if opts['peer'] == 'ssh1':
3210 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3210 ui.write(_('creating ssh peer for wire protocol version 1\n'))
3211 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3211 peer = sshpeer.sshv1peer(ui, url, proc, stdin, stdout, stderr,
3212 None, autoreadstderr=autoreadstderr)
3212 None, autoreadstderr=autoreadstderr)
3213 elif opts['peer'] == 'ssh2':
3213 elif opts['peer'] == 'ssh2':
3214 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3214 ui.write(_('creating ssh peer for wire protocol version 2\n'))
3215 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3215 peer = sshpeer.sshv2peer(ui, url, proc, stdin, stdout, stderr,
3216 None, autoreadstderr=autoreadstderr)
3216 None, autoreadstderr=autoreadstderr)
3217 elif opts['peer'] == 'raw':
3217 elif opts['peer'] == 'raw':
3218 ui.write(_('using raw connection to peer\n'))
3218 ui.write(_('using raw connection to peer\n'))
3219 peer = None
3219 peer = None
3220 else:
3220 else:
3221 ui.write(_('creating ssh peer from handshake results\n'))
3221 ui.write(_('creating ssh peer from handshake results\n'))
3222 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3222 peer = sshpeer.makepeer(ui, url, proc, stdin, stdout, stderr,
3223 autoreadstderr=autoreadstderr)
3223 autoreadstderr=autoreadstderr)
3224
3224
3225 elif path:
3225 elif path:
3226 # We bypass hg.peer() so we can proxy the sockets.
3226 # We bypass hg.peer() so we can proxy the sockets.
3227 # TODO consider not doing this because we skip
3227 # TODO consider not doing this because we skip
3228 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3228 # ``hg.wirepeersetupfuncs`` and potentially other useful functionality.
3229 u = util.url(path)
3229 u = util.url(path)
3230 if u.scheme != 'http':
3230 if u.scheme != 'http':
3231 raise error.Abort(_('only http:// paths are currently supported'))
3231 raise error.Abort(_('only http:// paths are currently supported'))
3232
3232
3233 url, authinfo = u.authinfo()
3233 url, authinfo = u.authinfo()
3234 openerargs = {
3234 openerargs = {
3235 r'useragent': b'Mercurial debugwireproto',
3235 r'useragent': b'Mercurial debugwireproto',
3236 }
3236 }
3237
3237
3238 # Turn pipes/sockets into observers so we can log I/O.
3238 # Turn pipes/sockets into observers so we can log I/O.
3239 if ui.verbose:
3239 if ui.verbose:
3240 openerargs.update({
3240 openerargs.update({
3241 r'loggingfh': ui,
3241 r'loggingfh': ui,
3242 r'loggingname': b's',
3242 r'loggingname': b's',
3243 r'loggingopts': {
3243 r'loggingopts': {
3244 r'logdata': True,
3244 r'logdata': True,
3245 r'logdataapis': False,
3245 r'logdataapis': False,
3246 },
3246 },
3247 })
3247 })
3248
3248
3249 if ui.debugflag:
3249 if ui.debugflag:
3250 openerargs[r'loggingopts'][r'logdataapis'] = True
3250 openerargs[r'loggingopts'][r'logdataapis'] = True
3251
3251
3252 # Don't send default headers when in raw mode. This allows us to
3252 # Don't send default headers when in raw mode. This allows us to
3253 # bypass most of the behavior of our URL handling code so we can
3253 # bypass most of the behavior of our URL handling code so we can
3254 # have near complete control over what's sent on the wire.
3254 # have near complete control over what's sent on the wire.
3255 if opts['peer'] == 'raw':
3255 if opts['peer'] == 'raw':
3256 openerargs[r'sendaccept'] = False
3256 openerargs[r'sendaccept'] = False
3257
3257
3258 opener = urlmod.opener(ui, authinfo, **openerargs)
3258 opener = urlmod.opener(ui, authinfo, **openerargs)
3259
3259
3260 if opts['peer'] == 'http2':
3260 if opts['peer'] == 'http2':
3261 ui.write(_('creating http peer for wire protocol version 2\n'))
3261 ui.write(_('creating http peer for wire protocol version 2\n'))
3262 # We go through makepeer() because we need an API descriptor for
3262 # We go through makepeer() because we need an API descriptor for
3263 # the peer instance to be useful.
3263 # the peer instance to be useful.
3264 with ui.configoverride({
3264 with ui.configoverride({
3265 ('experimental', 'httppeer.advertise-v2'): True}):
3265 ('experimental', 'httppeer.advertise-v2'): True}):
3266 if opts['nologhandshake']:
3266 if opts['nologhandshake']:
3267 ui.pushbuffer()
3267 ui.pushbuffer()
3268
3268
3269 peer = httppeer.makepeer(ui, path, opener=opener)
3269 peer = httppeer.makepeer(ui, path, opener=opener)
3270
3270
3271 if opts['nologhandshake']:
3271 if opts['nologhandshake']:
3272 ui.popbuffer()
3272 ui.popbuffer()
3273
3273
3274 if not isinstance(peer, httppeer.httpv2peer):
3274 if not isinstance(peer, httppeer.httpv2peer):
3275 raise error.Abort(_('could not instantiate HTTP peer for '
3275 raise error.Abort(_('could not instantiate HTTP peer for '
3276 'wire protocol version 2'),
3276 'wire protocol version 2'),
3277 hint=_('the server may not have the feature '
3277 hint=_('the server may not have the feature '
3278 'enabled or is not allowing this '
3278 'enabled or is not allowing this '
3279 'client version'))
3279 'client version'))
3280
3280
3281 elif opts['peer'] == 'raw':
3281 elif opts['peer'] == 'raw':
3282 ui.write(_('using raw connection to peer\n'))
3282 ui.write(_('using raw connection to peer\n'))
3283 peer = None
3283 peer = None
3284 elif opts['peer']:
3284 elif opts['peer']:
3285 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3285 raise error.Abort(_('--peer %s not supported with HTTP peers') %
3286 opts['peer'])
3286 opts['peer'])
3287 else:
3287 else:
3288 peer = httppeer.makepeer(ui, path, opener=opener)
3288 peer = httppeer.makepeer(ui, path, opener=opener)
3289
3289
3290 # We /could/ populate stdin/stdout with sock.makefile()...
3290 # We /could/ populate stdin/stdout with sock.makefile()...
3291 else:
3291 else:
3292 raise error.Abort(_('unsupported connection configuration'))
3292 raise error.Abort(_('unsupported connection configuration'))
3293
3293
3294 batchedcommands = None
3294 batchedcommands = None
3295
3295
3296 # Now perform actions based on the parsed wire language instructions.
3296 # Now perform actions based on the parsed wire language instructions.
3297 for action, lines in blocks:
3297 for action, lines in blocks:
3298 if action in ('raw', 'raw+'):
3298 if action in ('raw', 'raw+'):
3299 if not stdin:
3299 if not stdin:
3300 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3300 raise error.Abort(_('cannot call raw/raw+ on this peer'))
3301
3301
3302 # Concatenate the data together.
3302 # Concatenate the data together.
3303 data = ''.join(l.lstrip() for l in lines)
3303 data = ''.join(l.lstrip() for l in lines)
3304 data = stringutil.unescapestr(data)
3304 data = stringutil.unescapestr(data)
3305 stdin.write(data)
3305 stdin.write(data)
3306
3306
3307 if action == 'raw+':
3307 if action == 'raw+':
3308 stdin.flush()
3308 stdin.flush()
3309 elif action == 'flush':
3309 elif action == 'flush':
3310 if not stdin:
3310 if not stdin:
3311 raise error.Abort(_('cannot call flush on this peer'))
3311 raise error.Abort(_('cannot call flush on this peer'))
3312 stdin.flush()
3312 stdin.flush()
3313 elif action.startswith('command'):
3313 elif action.startswith('command'):
3314 if not peer:
3314 if not peer:
3315 raise error.Abort(_('cannot send commands unless peer instance '
3315 raise error.Abort(_('cannot send commands unless peer instance '
3316 'is available'))
3316 'is available'))
3317
3317
3318 command = action.split(' ', 1)[1]
3318 command = action.split(' ', 1)[1]
3319
3319
3320 args = {}
3320 args = {}
3321 for line in lines:
3321 for line in lines:
3322 # We need to allow empty values.
3322 # We need to allow empty values.
3323 fields = line.lstrip().split(' ', 1)
3323 fields = line.lstrip().split(' ', 1)
3324 if len(fields) == 1:
3324 if len(fields) == 1:
3325 key = fields[0]
3325 key = fields[0]
3326 value = ''
3326 value = ''
3327 else:
3327 else:
3328 key, value = fields
3328 key, value = fields
3329
3329
3330 if value.startswith('eval:'):
3330 if value.startswith('eval:'):
3331 value = stringutil.evalpythonliteral(value[5:])
3331 value = stringutil.evalpythonliteral(value[5:])
3332 else:
3332 else:
3333 value = stringutil.unescapestr(value)
3333 value = stringutil.unescapestr(value)
3334
3334
3335 args[key] = value
3335 args[key] = value
3336
3336
3337 if batchedcommands is not None:
3337 if batchedcommands is not None:
3338 batchedcommands.append((command, args))
3338 batchedcommands.append((command, args))
3339 continue
3339 continue
3340
3340
3341 ui.status(_('sending %s command\n') % command)
3341 ui.status(_('sending %s command\n') % command)
3342
3342
3343 if 'PUSHFILE' in args:
3343 if 'PUSHFILE' in args:
3344 with open(args['PUSHFILE'], r'rb') as fh:
3344 with open(args['PUSHFILE'], r'rb') as fh:
3345 del args['PUSHFILE']
3345 del args['PUSHFILE']
3346 res, output = peer._callpush(command, fh,
3346 res, output = peer._callpush(command, fh,
3347 **pycompat.strkwargs(args))
3347 **pycompat.strkwargs(args))
3348 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3348 ui.status(_('result: %s\n') % stringutil.escapestr(res))
3349 ui.status(_('remote output: %s\n') %
3349 ui.status(_('remote output: %s\n') %
3350 stringutil.escapestr(output))
3350 stringutil.escapestr(output))
3351 else:
3351 else:
3352 with peer.commandexecutor() as e:
3352 with peer.commandexecutor() as e:
3353 res = e.callcommand(command, args).result()
3353 res = e.callcommand(command, args).result()
3354
3354
3355 if isinstance(res, wireprotov2peer.commandresponse):
3355 if isinstance(res, wireprotov2peer.commandresponse):
3356 val = res.objects()
3356 val = res.objects()
3357 ui.status(_('response: %s\n') %
3357 ui.status(_('response: %s\n') %
3358 stringutil.pprint(val, bprefix=True, indent=2))
3358 stringutil.pprint(val, bprefix=True, indent=2))
3359 else:
3359 else:
3360 ui.status(_('response: %s\n') %
3360 ui.status(_('response: %s\n') %
3361 stringutil.pprint(res, bprefix=True, indent=2))
3361 stringutil.pprint(res, bprefix=True, indent=2))
3362
3362
3363 elif action == 'batchbegin':
3363 elif action == 'batchbegin':
3364 if batchedcommands is not None:
3364 if batchedcommands is not None:
3365 raise error.Abort(_('nested batchbegin not allowed'))
3365 raise error.Abort(_('nested batchbegin not allowed'))
3366
3366
3367 batchedcommands = []
3367 batchedcommands = []
3368 elif action == 'batchsubmit':
3368 elif action == 'batchsubmit':
3369 # There is a batching API we could go through. But it would be
3369 # There is a batching API we could go through. But it would be
3370 # difficult to normalize requests into function calls. It is easier
3370 # difficult to normalize requests into function calls. It is easier
3371 # to bypass this layer and normalize to commands + args.
3371 # to bypass this layer and normalize to commands + args.
3372 ui.status(_('sending batch with %d sub-commands\n') %
3372 ui.status(_('sending batch with %d sub-commands\n') %
3373 len(batchedcommands))
3373 len(batchedcommands))
3374 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3374 for i, chunk in enumerate(peer._submitbatch(batchedcommands)):
3375 ui.status(_('response #%d: %s\n') %
3375 ui.status(_('response #%d: %s\n') %
3376 (i, stringutil.escapestr(chunk)))
3376 (i, stringutil.escapestr(chunk)))
3377
3377
3378 batchedcommands = None
3378 batchedcommands = None
3379
3379
3380 elif action.startswith('httprequest '):
3380 elif action.startswith('httprequest '):
3381 if not opener:
3381 if not opener:
3382 raise error.Abort(_('cannot use httprequest without an HTTP '
3382 raise error.Abort(_('cannot use httprequest without an HTTP '
3383 'peer'))
3383 'peer'))
3384
3384
3385 request = action.split(' ', 2)
3385 request = action.split(' ', 2)
3386 if len(request) != 3:
3386 if len(request) != 3:
3387 raise error.Abort(_('invalid httprequest: expected format is '
3387 raise error.Abort(_('invalid httprequest: expected format is '
3388 '"httprequest <method> <path>'))
3388 '"httprequest <method> <path>'))
3389
3389
3390 method, httppath = request[1:]
3390 method, httppath = request[1:]
3391 headers = {}
3391 headers = {}
3392 body = None
3392 body = None
3393 frames = []
3393 frames = []
3394 for line in lines:
3394 for line in lines:
3395 line = line.lstrip()
3395 line = line.lstrip()
3396 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3396 m = re.match(b'^([a-zA-Z0-9_-]+): (.*)$', line)
3397 if m:
3397 if m:
3398 # Headers need to use native strings.
3398 # Headers need to use native strings.
3399 key = pycompat.strurl(m.group(1))
3399 key = pycompat.strurl(m.group(1))
3400 value = pycompat.strurl(m.group(2))
3400 value = pycompat.strurl(m.group(2))
3401 headers[key] = value
3401 headers[key] = value
3402 continue
3402 continue
3403
3403
3404 if line.startswith(b'BODYFILE '):
3404 if line.startswith(b'BODYFILE '):
3405 with open(line.split(b' ', 1), 'rb') as fh:
3405 with open(line.split(b' ', 1), 'rb') as fh:
3406 body = fh.read()
3406 body = fh.read()
3407 elif line.startswith(b'frame '):
3407 elif line.startswith(b'frame '):
3408 frame = wireprotoframing.makeframefromhumanstring(
3408 frame = wireprotoframing.makeframefromhumanstring(
3409 line[len(b'frame '):])
3409 line[len(b'frame '):])
3410
3410
3411 frames.append(frame)
3411 frames.append(frame)
3412 else:
3412 else:
3413 raise error.Abort(_('unknown argument to httprequest: %s') %
3413 raise error.Abort(_('unknown argument to httprequest: %s') %
3414 line)
3414 line)
3415
3415
3416 url = path + httppath
3416 url = path + httppath
3417
3417
3418 if frames:
3418 if frames:
3419 body = b''.join(bytes(f) for f in frames)
3419 body = b''.join(bytes(f) for f in frames)
3420
3420
3421 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3421 req = urlmod.urlreq.request(pycompat.strurl(url), body, headers)
3422
3422
3423 # urllib.Request insists on using has_data() as a proxy for
3423 # urllib.Request insists on using has_data() as a proxy for
3424 # determining the request method. Override that to use our
3424 # determining the request method. Override that to use our
3425 # explicitly requested method.
3425 # explicitly requested method.
3426 req.get_method = lambda: pycompat.sysstr(method)
3426 req.get_method = lambda: pycompat.sysstr(method)
3427
3427
3428 try:
3428 try:
3429 res = opener.open(req)
3429 res = opener.open(req)
3430 body = res.read()
3430 body = res.read()
3431 except util.urlerr.urlerror as e:
3431 except util.urlerr.urlerror as e:
3432 # read() method must be called, but only exists in Python 2
3432 # read() method must be called, but only exists in Python 2
3433 getattr(e, 'read', lambda: None)()
3433 getattr(e, 'read', lambda: None)()
3434 continue
3434 continue
3435
3435
3436 ct = res.headers.get(r'Content-Type')
3436 ct = res.headers.get(r'Content-Type')
3437 if ct == r'application/mercurial-cbor':
3437 if ct == r'application/mercurial-cbor':
3438 ui.write(_('cbor> %s\n') %
3438 ui.write(_('cbor> %s\n') %
3439 stringutil.pprint(cborutil.decodeall(body),
3439 stringutil.pprint(cborutil.decodeall(body),
3440 bprefix=True,
3440 bprefix=True,
3441 indent=2))
3441 indent=2))
3442
3442
3443 elif action == 'close':
3443 elif action == 'close':
3444 peer.close()
3444 peer.close()
3445 elif action == 'readavailable':
3445 elif action == 'readavailable':
3446 if not stdout or not stderr:
3446 if not stdout or not stderr:
3447 raise error.Abort(_('readavailable not available on this peer'))
3447 raise error.Abort(_('readavailable not available on this peer'))
3448
3448
3449 stdin.close()
3449 stdin.close()
3450 stdout.read()
3450 stdout.read()
3451 stderr.read()
3451 stderr.read()
3452
3452
3453 elif action == 'readline':
3453 elif action == 'readline':
3454 if not stdout:
3454 if not stdout:
3455 raise error.Abort(_('readline not available on this peer'))
3455 raise error.Abort(_('readline not available on this peer'))
3456 stdout.readline()
3456 stdout.readline()
3457 elif action == 'ereadline':
3457 elif action == 'ereadline':
3458 if not stderr:
3458 if not stderr:
3459 raise error.Abort(_('ereadline not available on this peer'))
3459 raise error.Abort(_('ereadline not available on this peer'))
3460 stderr.readline()
3460 stderr.readline()
3461 elif action.startswith('read '):
3461 elif action.startswith('read '):
3462 count = int(action.split(' ', 1)[1])
3462 count = int(action.split(' ', 1)[1])
3463 if not stdout:
3463 if not stdout:
3464 raise error.Abort(_('read not available on this peer'))
3464 raise error.Abort(_('read not available on this peer'))
3465 stdout.read(count)
3465 stdout.read(count)
3466 elif action.startswith('eread '):
3466 elif action.startswith('eread '):
3467 count = int(action.split(' ', 1)[1])
3467 count = int(action.split(' ', 1)[1])
3468 if not stderr:
3468 if not stderr:
3469 raise error.Abort(_('eread not available on this peer'))
3469 raise error.Abort(_('eread not available on this peer'))
3470 stderr.read(count)
3470 stderr.read(count)
3471 else:
3471 else:
3472 raise error.Abort(_('unknown action: %s') % action)
3472 raise error.Abort(_('unknown action: %s') % action)
3473
3473
3474 if batchedcommands is not None:
3474 if batchedcommands is not None:
3475 raise error.Abort(_('unclosed "batchbegin" request'))
3475 raise error.Abort(_('unclosed "batchbegin" request'))
3476
3476
3477 if peer:
3477 if peer:
3478 peer.close()
3478 peer.close()
3479
3479
3480 if proc:
3480 if proc:
3481 proc.kill()
3481 proc.kill()
@@ -1,3316 +1,3325 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import absolute_import, print_function
46 from __future__ import absolute_import, print_function
47
47
48 import argparse
48 import argparse
49 import collections
49 import collections
50 import difflib
50 import difflib
51 import distutils.version as version
51 import distutils.version as version
52 import errno
52 import errno
53 import json
53 import json
54 import multiprocessing
54 import multiprocessing
55 import os
55 import os
56 import random
56 import random
57 import re
57 import re
58 import shutil
58 import shutil
59 import signal
59 import signal
60 import socket
60 import socket
61 import subprocess
61 import subprocess
62 import sys
62 import sys
63 import sysconfig
63 import sysconfig
64 import tempfile
64 import tempfile
65 import threading
65 import threading
66 import time
66 import time
67 import unittest
67 import unittest
68 import uuid
68 import uuid
69 import xml.dom.minidom as minidom
69 import xml.dom.minidom as minidom
70
70
71 try:
71 try:
72 import Queue as queue
72 import Queue as queue
73 except ImportError:
73 except ImportError:
74 import queue
74 import queue
75
75
76 try:
76 try:
77 import shlex
77 import shlex
78 shellquote = shlex.quote
78 shellquote = shlex.quote
79 except (ImportError, AttributeError):
79 except (ImportError, AttributeError):
80 import pipes
80 import pipes
81 shellquote = pipes.quote
81 shellquote = pipes.quote
82
82
83 if os.environ.get('RTUNICODEPEDANTRY', False):
83 if os.environ.get('RTUNICODEPEDANTRY', False):
84 try:
84 try:
85 reload(sys)
85 reload(sys)
86 sys.setdefaultencoding("undefined")
86 sys.setdefaultencoding("undefined")
87 except NameError:
87 except NameError:
88 pass
88 pass
89
89
90 processlock = threading.Lock()
90 processlock = threading.Lock()
91
91
92 pygmentspresent = False
92 pygmentspresent = False
93 # ANSI color is unsupported prior to Windows 10
93 # ANSI color is unsupported prior to Windows 10
94 if os.name != 'nt':
94 if os.name != 'nt':
95 try: # is pygments installed
95 try: # is pygments installed
96 import pygments
96 import pygments
97 import pygments.lexers as lexers
97 import pygments.lexers as lexers
98 import pygments.lexer as lexer
98 import pygments.lexer as lexer
99 import pygments.formatters as formatters
99 import pygments.formatters as formatters
100 import pygments.token as token
100 import pygments.token as token
101 import pygments.style as style
101 import pygments.style as style
102 pygmentspresent = True
102 pygmentspresent = True
103 difflexer = lexers.DiffLexer()
103 difflexer = lexers.DiffLexer()
104 terminal256formatter = formatters.Terminal256Formatter()
104 terminal256formatter = formatters.Terminal256Formatter()
105 except ImportError:
105 except ImportError:
106 pass
106 pass
107
107
108 if pygmentspresent:
108 if pygmentspresent:
class TestRunnerStyle(style.Style):
    """Pygments style used to colorize run-tests result lines.

    Defines four custom token types (skipped/failed lines and the test
    names inside them) and maps each to a terminal color.
    """
    default_style = ""
    # Custom token types for the four output categories.
    skipped = token.string_to_tokentype("Token.Generic.Skipped")
    skippedname = token.string_to_tokentype("Token.Generic.SName")
    failed = token.string_to_tokentype("Token.Generic.Failed")
    failedname = token.string_to_tokentype("Token.Generic.FName")
    # Color assignments: grey/cyan for skips, dark/bright red for failures.
    styles = {
        skipped: '#e5e5e5',
        skippedname: '#00ffff',
        failed: '#7f0000',
        failedname: '#ff0000',
    }
121
121
class TestRunnerLexer(lexer.RegexLexer):
    """Lexer that tokenizes run-tests summary output for colorizing.

    The 'root' state recognizes "Skipped", "Failed " and "ERROR: "
    prefixes and hands off to a sub-state that highlights the test
    name separately from the rest of the line.
    """
    # A test name: word chars/dashes, a .t or .py suffix, and an
    # optional "#case" suffix for parameterized tests.
    testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
    tokens = {
        'root': [
            (r'^Skipped', token.Generic.Skipped, 'skipped'),
            (r'^Failed ', token.Generic.Failed, 'failed'),
            (r'^ERROR: ', token.Generic.Failed, 'failed'),
        ],
        'skipped': [
            (testpattern, token.Generic.SName),
            (r':.*', token.Generic.Skipped),
        ],
        'failed': [
            (testpattern, token.Generic.FName),
            (r'(:| ).*', token.Generic.Failed),
        ],
    }
139
139
140 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
140 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
141 runnerlexer = TestRunnerLexer()
141 runnerlexer = TestRunnerLexer()
142
142
143 origenviron = os.environ.copy()
143 origenviron = os.environ.copy()
144
144
145 if sys.version_info > (3, 5, 0):
145 if sys.version_info > (3, 5, 0):
146 PYTHON3 = True
146 PYTHON3 = True
147 xrange = range # we use xrange in one place, and we'd rather not use range
147 xrange = range # we use xrange in one place, and we'd rather not use range
148 def _bytespath(p):
148 def _bytespath(p):
149 if p is None:
149 if p is None:
150 return p
150 return p
151 return p.encode('utf-8')
151 return p.encode('utf-8')
152
152
153 def _strpath(p):
153 def _strpath(p):
154 if p is None:
154 if p is None:
155 return p
155 return p
156 return p.decode('utf-8')
156 return p.decode('utf-8')
157
157
158 osenvironb = getattr(os, 'environb', None)
158 osenvironb = getattr(os, 'environb', None)
159 if osenvironb is None:
159 if osenvironb is None:
160 # Windows lacks os.environb, for instance. A proxy over the real thing
160 # Windows lacks os.environb, for instance. A proxy over the real thing
161 # instead of a copy allows the environment to be updated via bytes on
161 # instead of a copy allows the environment to be updated via bytes on
162 # all platforms.
162 # all platforms.
class environbytes(object):
    """Bytes-keyed mapping proxy over a str-keyed environment object.

    Windows lacks ``os.environb``; this wrapper converts bytes keys and
    values to/from str at the boundary so callers can use bytes on all
    platforms while updates still reach the real ``os.environ``.
    """
    def __init__(self, strenv):
        # Size queries and wholesale clearing need no key translation,
        # so delegate them straight to the wrapped mapping.
        self.__len__ = strenv.__len__
        self.clear = strenv.clear
        self._strenv = strenv

    def __getitem__(self, k):
        return _bytespath(self._strenv.__getitem__(_strpath(k)))

    def __setitem__(self, k, v):
        self._strenv.__setitem__(_strpath(k), _strpath(v))

    def __delitem__(self, k):
        self._strenv.__delitem__(_strpath(k))

    def __contains__(self, k):
        return self._strenv.__contains__(_strpath(k))

    def __iter__(self):
        return iter([_bytespath(k) for k in iter(self._strenv)])

    def get(self, k, default=None):
        # The default is translated too, mirroring the underlying get().
        return _bytespath(self._strenv.get(_strpath(k), _strpath(default)))

    def pop(self, k, default=None):
        return _bytespath(self._strenv.pop(_strpath(k), _strpath(default)))
185
185
186 osenvironb = environbytes(os.environ)
186 osenvironb = environbytes(os.environ)
187
187
188 getcwdb = getattr(os, 'getcwdb')
188 getcwdb = getattr(os, 'getcwdb')
189 if not getcwdb or os.name == 'nt':
189 if not getcwdb or os.name == 'nt':
190 getcwdb = lambda: _bytespath(os.getcwd())
190 getcwdb = lambda: _bytespath(os.getcwd())
191
191
192 elif sys.version_info >= (3, 0, 0):
192 elif sys.version_info >= (3, 0, 0):
193 print('%s is only supported on Python 3.5+ and 2.7, not %s' %
193 print('%s is only supported on Python 3.5+ and 2.7, not %s' %
194 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
194 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
195 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
195 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
196 else:
196 else:
197 PYTHON3 = False
197 PYTHON3 = False
198
198
199 # In python 2.x, path operations are generally done using
199 # In python 2.x, path operations are generally done using
200 # bytestrings by default, so we don't have to do any extra
200 # bytestrings by default, so we don't have to do any extra
201 # fiddling there. We define the wrapper functions anyway just to
201 # fiddling there. We define the wrapper functions anyway just to
202 # help keep code consistent between platforms.
202 # help keep code consistent between platforms.
203 def _bytespath(p):
203 def _bytespath(p):
204 return p
204 return p
205
205
206 _strpath = _bytespath
206 _strpath = _bytespath
207 osenvironb = os.environ
207 osenvironb = os.environ
208 getcwdb = os.getcwd
208 getcwdb = os.getcwd
209
209
# For Windows support: os.WIFEXITED does not exist there, so fall back
# to a stub that reports "not exited normally" for every status.
wifexited = getattr(os, "WIFEXITED", lambda x: False)
212
212
213 # Whether to use IPv6
213 # Whether to use IPv6
def checksocketfamily(name, port=20058):
    """Return True if we can listen on localhost using family=name.

    name should be either 'AF_INET', or 'AF_INET6'.
    port being used is okay - EADDRINUSE is considered as successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        # the platform does not even define this address family
        return False
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            # a busy port still proves the family works
            return True
        elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        else:
            raise
    # NOTE: the original code had an unreachable `else: return False`
    # clause on the try statement; the try body always returns or raises,
    # so the clause was dead code and has been removed.
237
237
# useipv6 will be set by parseargs; None means "not decided yet".
useipv6 = None
240
240
def checkportisavailable(port):
    """Return True if *port* seems free to bind on localhost."""
    # honour the module-wide IPv6 preference decided by parseargs()
    family = socket.AF_INET6 if useipv6 else socket.AF_INET
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
        sock.bind(('localhost', port))
        sock.close()
        return True
    except socket.error as exc:
        # anything other than "taken/unavailable/unsupported" is a real error
        if exc.errno not in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
                             errno.EPROTONOSUPPORT):
            raise
        return False
257
257
# Closing inherited fds alongside redirected std handles is only safe on POSIX.
closefds = os.name == 'posix'

def Popen4(cmd, wd, timeout, env=None):
    """Spawn *cmd* through the shell in directory *wd*.

    Returns the Popen object decorated with popen2-style aliases
    (fromchild/tochild/childerr).  If *timeout* is non-zero, a watchdog
    thread terminates the process after that many seconds and sets
    p.timeout = True.
    """
    # Hold processlock only around the fork/exec.  Using `with` (instead
    # of the original acquire()/release() pair) guarantees the lock is
    # released even if subprocess.Popen raises, which previously would
    # have dead-locked every later caller.
    with processlock:
        p = subprocess.Popen(_strpath(cmd), shell=True, bufsize=-1,
                             cwd=_strpath(wd), env=env,
                             close_fds=closefds,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)

    # popen2 compatibility aliases used elsewhere in the harness
    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:
        def t():
            # poll until the deadline passes or the process finishes
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)
        threading.Thread(target=t).start()

    return p
284
284
# Locate the Python interpreter to hand to child scripts.  sys.executable
# can be empty or None (e.g. when embedded), so fall back to the
# conventional environment variables before giving up.
if sys.executable:
    sysexecutable = sys.executable
elif os.environ.get('PYTHONEXECUTABLE'):
    sysexecutable = os.environ['PYTHONEXECUTABLE']
elif os.environ.get('PYTHON'):
    sysexecutable = os.environ['PYTHON']
else:
    raise AssertionError('Could not find Python interpreter')

# normalize to forward slashes so the path works inside shell test scripts
PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
# Jython keeps its module search path in JYTHONPATH instead of PYTHONPATH.
IMPL_PATH = b'JYTHONPATH' if 'java' in sys.platform else b'PYTHONPATH'
289
298
# Baseline option values.  Each entry maps an option name to the
# environment variable that may override it plus the hard-coded default;
# getparser() folds the environment values in before registering them.
defaults = {
    'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}
297
306
def canonpath(path):
    """Expand '~' in *path* and resolve symlinks to a canonical form."""
    expanded = os.path.expanduser(path)
    return os.path.realpath(expanded)
300
309
def parselistfiles(files, listtype, warn=True):
    """Parse whitelist/blacklist files into a dict.

    Every non-empty, non-comment line of each readable file in *files*
    becomes a (bytes) key mapping to the file it came from.  Files that
    do not exist are skipped, with a warning unless *warn* is False.
    """
    entries = dict()
    for filename in files:
        # expanduser/expandvars cannot raise IOError, so they can live
        # outside the try block
        path = os.path.expanduser(os.path.expandvars(filename))
        try:
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        # `with` guarantees the handle is closed even if parsing raises
        # (the original leaked the fd on such an error path)
        with f:
            for line in f:
                # '#' starts a comment; blank lines are ignored
                line = line.split(b'#', 1)[0].strip()
                if line:
                    entries[line] = filename
    return entries
321
330
def parsettestcases(path):
    """Read a .t test file and return its test case name lists.

    Each '#testcases' line contributes one sorted list of (bytes) names.
    Returns an empty list if *path* does not exist.
    """
    cases = []
    try:
        with open(path, 'rb') as fp:
            for line in fp:
                if line.startswith(b'#testcases '):
                    cases.append(sorted(line[11:].split()))
    except IOError as ex:
        # a missing file is fine; anything else is a real error
        if ex.errno != errno.ENOENT:
            raise
    return cases
337
346
def getparser():
    """Build and return the argparse.ArgumentParser used by the CLI.

    Options are grouped into Test Selection, Test Harness Behavior,
    Mercurial Configuration and Results Reporting.  Defaults come from
    the module-level `defaults` dict, with environment overrides folded
    in at the end.
    """
    parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')

    # --- which tests to run ---
    selection = parser.add_argument_group('Test Selection')
    selection.add_argument('--allow-slow-tests', action='store_true',
        help='allow extremely slow tests')
    selection.add_argument("--blacklist", action="append",
        help="skip tests listed in the specified blacklist file")
    selection.add_argument("--changed",
        help="run tests that are changed in parent rev or working directory")
    selection.add_argument("-k", "--keywords",
        help="run tests matching keywords")
    selection.add_argument("-r", "--retest", action="store_true",
        help = "retest failed tests")
    selection.add_argument("--test-list", action="append",
        help="read tests to run from the specified file")
    selection.add_argument("--whitelist", action="append",
        help="always run tests listed in the specified whitelist file")
    selection.add_argument('tests', metavar='TESTS', nargs='*',
        help='Tests to run')

    # --- how the harness itself behaves ---
    harness = parser.add_argument_group('Test Harness Behavior')
    harness.add_argument('--bisect-repo',
        metavar='bisect_repo',
        help=("Path of a repo to bisect. Use together with "
              "--known-good-rev"))
    harness.add_argument("-d", "--debug", action="store_true",
        help="debug mode: write output of test scripts to console"
             " rather than capturing and diffing it (disables timeout)")
    harness.add_argument("-f", "--first", action="store_true",
        help="exit on the first test failure")
    harness.add_argument("-i", "--interactive", action="store_true",
        help="prompt to accept changed output")
    harness.add_argument("-j", "--jobs", type=int,
        help="number of jobs to run in parallel"
             " (default: $%s or %d)" % defaults['jobs'])
    harness.add_argument("--keep-tmpdir", action="store_true",
        help="keep temporary directory after running tests")
    harness.add_argument('--known-good-rev',
        metavar="known_good_rev",
        help=("Automatically bisect any failures using this "
              "revision as a known-good revision."))
    harness.add_argument("--list-tests", action="store_true",
        help="list tests instead of running them")
    harness.add_argument("--loop", action="store_true",
        help="loop tests repeatedly")
    harness.add_argument('--random', action="store_true",
        help='run tests in random order')
    harness.add_argument('--order-by-runtime', action="store_true",
        help='run slowest tests first, according to .testtimes')
    harness.add_argument("-p", "--port", type=int,
        help="port on which servers should listen"
             " (default: $%s or %d)" % defaults['port'])
    harness.add_argument('--profile-runner', action='store_true',
        help='run statprof on run-tests')
    harness.add_argument("-R", "--restart", action="store_true",
        help="restart at last error")
    harness.add_argument("--runs-per-test", type=int, dest="runs_per_test",
        help="run each test N times (default=1)", default=1)
    harness.add_argument("--shell",
        help="shell to use (default: $%s or %s)" % defaults['shell'])
    harness.add_argument('--showchannels', action='store_true',
        help='show scheduling channels')
    harness.add_argument("--slowtimeout", type=int,
        help="kill errant slow tests after SLOWTIMEOUT seconds"
             " (default: $%s or %d)" % defaults['slowtimeout'])
    harness.add_argument("-t", "--timeout", type=int,
        help="kill errant tests after TIMEOUT seconds"
             " (default: $%s or %d)" % defaults['timeout'])
    harness.add_argument("--tmpdir",
        help="run tests in the given temporary directory"
             " (implies --keep-tmpdir)")
    harness.add_argument("-v", "--verbose", action="store_true",
        help="output verbose messages")

    # --- which hg/chg to test and how it is built ---
    hgconf = parser.add_argument_group('Mercurial Configuration')
    hgconf.add_argument("--chg", action="store_true",
        help="install and use chg wrapper in place of hg")
    hgconf.add_argument("--compiler",
        help="compiler to build with")
    hgconf.add_argument('--extra-config-opt', action="append", default=[],
        help='set the given config opt in the test hgrc')
    hgconf.add_argument("-l", "--local", action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
             "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
    hgconf.add_argument("--ipv6", action="store_true",
        help="prefer IPv6 to IPv4 for network related tests")
    hgconf.add_argument("--pure", action="store_true",
        help="use pure Python code instead of C extensions")
    hgconf.add_argument("-3", "--py3-warnings", action="store_true",
        help="enable Py3k warnings on Python 2.7+")
    hgconf.add_argument("--with-chg", metavar="CHG",
        help="use specified chg wrapper in place of hg")
    hgconf.add_argument("--with-hg",
        metavar="HG",
        help="test using specified hg script rather than a "
             "temporary installation")

    # --- how results are reported ---
    reporting = parser.add_argument_group('Results Reporting')
    reporting.add_argument("-C", "--annotate", action="store_true",
        help="output files annotated with coverage")
    reporting.add_argument("--color", choices=["always", "auto", "never"],
        default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
        help="colorisation: always|auto|never (default: auto)")
    reporting.add_argument("-c", "--cover", action="store_true",
        help="print a test coverage report")
    reporting.add_argument('--exceptions', action='store_true',
        help='log all exceptions and generate an exception report')
    reporting.add_argument("-H", "--htmlcov", action="store_true",
        help="create an HTML report of the coverage of the files")
    reporting.add_argument("--json", action="store_true",
        help="store test result data in 'report.json' file")
    reporting.add_argument("--outputdir",
        help="directory to write error logs to (default=test directory)")
    reporting.add_argument("-n", "--nodiff", action="store_true",
        help="skip showing test changes")
    reporting.add_argument("-S", "--noskips", action="store_true",
        help="don't report skip tests verbosely")
    reporting.add_argument("--time", action="store_true",
        help="time how long each test takes")
    reporting.add_argument("--view",
        help="external diff viewer")
    reporting.add_argument("--xunit",
        help="record xunit results at specified path")

    # fold environment-variable overrides into `defaults`, coercing each
    # to the type of its hard-coded default, then register them
    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser
469
478
def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results.

    Returns the validated/normalized options namespace.  Side effects:
    sets the module globals `useipv6` and (if -v) `verbose`, and may
    terminate via parser.error() on invalid combinations.
    """
    options = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (os.path.isfile(options.with_hg) and
                os.access(options.with_hg, os.X_OK)):
            parser.error('--with-hg must specify an executable hg script')
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
            sys.stderr.flush()
    if options.local:
        # derive the repo root from this script's own location
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            # the X_OK check is skipped on Windows where it is meaningless
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error('--local specified, but %r not found or '
                             'not executable' % binpath)
            setattr(options, attr, binpath)

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False  # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (os.path.isfile(options.with_chg) and
                os.access(options.with_chg, os.X_OK)):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error('--chg does not work when --with-hg is specified '
                     '(use --with-chg instead)')

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write('warning: --color=always ignored because '
                         'pygments is not installed\n')

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = ((not checksocketfamily('AF_INET'))
                   and checksocketfamily('AF_INET6'))

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage
            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error("sorry, coverage options do not work when --local "
                     "is specified")

    if options.anycoverage and options.with_hg:
        parser.error("sorry, coverage options do not work when --with-hg "
                     "is specified")

    global verbose
    if options.verbose:
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        # debug mode streams output directly, so timeouts cannot apply
        if options.timeout != defaults['timeout']:
            sys.stderr.write(
                'warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n')
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3_warnings:
        if PYTHON3:
            parser.error(
                '--py3-warnings can only be used on Python 2.7')

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return options
582
591
def rename(src, dst):
    """Move *src* to *dst*, allowing an existing destination.

    Implemented as copy-then-remove: compared to os.rename() this trades
    atomicity and opened-files friendliness for overwrite support.
    """
    shutil.copy(src, dst)
    os.remove(src)
589
598
def makecleanable(path):
    """Best-effort chmod u+rwx on every directory under *path*.

    This makes the whole tree deletable afterwards even when tests left
    unreadable/unwritable directories behind.
    """
    for dirpath, dirnames, _filenames in os.walk(path, topdown=True):
        for name in dirnames:
            full = os.path.join(dirpath, name)
            try:
                # chmod u+rwx, preserving the other permission bits
                os.chmod(full, os.stat(full).st_mode & 0o777 | 0o700)
            except OSError:
                # ignore entries we cannot stat or chmod; best effort only
                pass
600
609
# difflib.unified_diff only accepts str on Python 3; diff_bytes adapts it
# to the bytes this harness works with.  Python 2 handles bytes natively.
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools
    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
605
614
def getdiff(expected, output, ref, err):
    """Diff *expected* against *output* and return (servefail, lines).

    Header paths are normalized to forward slashes and trailing
    whitespace before the newline is stripped.  servefail is True when
    the diff contains a server-start failure marker.
    """
    servefail = False
    lines = []
    for line in _unified_diff(expected, output, ref, err):
        # normalize the '---'/'+++' header paths for Windows
        if line.startswith((b'+++', b'---')):
            line = line.replace(b'\\', b'/')
        if line.endswith(b' \n'):
            line = line[:-2] + b'\n'
        lines.append(line)
        servefail = servefail or line.startswith(
            b'+ abort: child process failed to start')

    return servefail, lines
620
629
# verbose stays False until parseargs() enables it with -v
verbose = False

def vlog(*msg):
    """Forward *msg* to log(), but only when verbose mode is enabled."""
    if verbose is not False:
        return log(*msg)
628
637
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceeding line output in group 1:
#
#   output..output (feature !)\n
optline = re.compile(br'(.*) \((.+?) !\)\n$')
638
647
639 def cdatasafe(data):
648 def cdatasafe(data):
640 """Make a string safe to include in a CDATA block.
649 """Make a string safe to include in a CDATA block.
641
650
642 Certain control characters are illegal in a CDATA block, and
651 Certain control characters are illegal in a CDATA block, and
643 there's no way to include a ]]> in a CDATA either. This function
652 there's no way to include a ]]> in a CDATA either. This function
644 replaces illegal bytes with ? and adds a space between the ]] so
653 replaces illegal bytes with ? and adds a space between the ]] so
645 that it won't break the CDATA block.
654 that it won't break the CDATA block.
646 """
655 """
647 return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
656 return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
648
657
def log(*msg):
    """Log something to stdout.

    Arguments are strings to print.
    """
    # Serialize output: multiple test threads share stdout.
    with iolock:
        if verbose:
            # 'verbose' may hold a truthy prefix value; emit it first.
            print(verbose, end=' ')
        for m in msg:
            print(m, end=' ')
        print()
        sys.stdout.flush()
661
670
def highlightdiff(line, color):
    """Return the diff line (bytes) with terminal colors when *color* is set.

    With a falsy *color* the line is returned untouched; otherwise pygments
    must be importable (asserted via the module flag).
    """
    if not color:
        return line
    assert pygmentspresent
    text = line.decode('latin1')
    colored = pygments.highlight(text, difflexer, terminal256formatter)
    return colored.encode('latin1')
668
677
def highlightmsg(msg, color):
    """Return the runner message colorized when *color* is set, else as-is."""
    if color:
        assert pygmentspresent
        return pygments.highlight(msg, runnerlexer, runnerformatter)
    return msg
674
683
def terminate(proc):
    """Terminate subprocess"""
    vlog('# Terminating process %d' % proc.pid)
    try:
        proc.terminate()
    except OSError:
        # The process may have exited already; nothing to do.
        pass
682
691
def killdaemons(pidfile):
    """Kill the daemon processes recorded in *pidfile* and remove the file.

    The helper module is imported locally because it shares this
    function's name and would otherwise be shadowed.
    """
    import killdaemons as killmod
    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                               logfn=vlog)
687
696
688 class Test(unittest.TestCase):
697 class Test(unittest.TestCase):
689 """Encapsulates a single, runnable test.
698 """Encapsulates a single, runnable test.
690
699
691 While this class conforms to the unittest.TestCase API, it differs in that
700 While this class conforms to the unittest.TestCase API, it differs in that
692 instances need to be instantiated manually. (Typically, unittest.TestCase
701 instances need to be instantiated manually. (Typically, unittest.TestCase
693 classes are instantiated automatically by scanning modules.)
702 classes are instantiated automatically by scanning modules.)
694 """
703 """
695
704
696 # Status code reserved for skipped tests (used by hghave).
705 # Status code reserved for skipped tests (used by hghave).
697 SKIPPED_STATUS = 80
706 SKIPPED_STATUS = 80
698
707
    def __init__(self, path, outputdir, tmpdir, keeptmpdir=False,
                 debug=False,
                 first=False,
                 timeout=None,
                 startport=None, extraconfigopts=None,
                 py3warnings=False, shell=None, hgcommand=None,
                 slowtimeout=None, usechg=False,
                 useipv6=False):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3warnings enables Py3k warnings.

        shell is the shell to execute tests in.
        """
        # Fall back to run-time defaults for unspecified limits/ports.
        if timeout is None:
            timeout = defaults['timeout']
        if startport is None:
            startport = defaults['port']
        if slowtimeout is None:
            slowtimeout = defaults['slowtimeout']
        self.path = path
        self.bname = os.path.basename(path)
        # bname is bytes; name is its str form for unittest APIs.
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        self._outputdir = outputdir
        self._tmpname = os.path.basename(path)
        self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)

        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._first = first
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3warnings = py3warnings
        self._shell = _bytespath(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._useipv6 = useipv6

        # Per-run state; (re)initialized in setUp().
        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        self._refout = self.readrefout()
775
784
776 def readrefout(self):
785 def readrefout(self):
777 """read reference output"""
786 """read reference output"""
778 # If we're not in --debug mode and reference output file exists,
787 # If we're not in --debug mode and reference output file exists,
779 # check test output against it.
788 # check test output against it.
780 if self._debug:
789 if self._debug:
781 return None # to match "out is None"
790 return None # to match "out is None"
782 elif os.path.exists(self.refpath):
791 elif os.path.exists(self.refpath):
783 with open(self.refpath, 'rb') as f:
792 with open(self.refpath, 'rb') as f:
784 return f.read().splitlines(True)
793 return f.read().splitlines(True)
785 else:
794 else:
786 return []
795 return []
787
796
    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        return self.name

    def __str__(self):
        # Tests are identified by their (file) name in all reporting.
        return self.name

    def shortDescription(self):
        return self.name
798
807
    def setUp(self):
        """Tasks to perform before run()."""
        # Reset per-run state so an instance can be run repeatedly.
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            # The thread-level tmp dir is shared; it may already exist.
            if e.errno != errno.EEXIST:
                raise

        name = self._tmpname
        self._testtmp = os.path.join(self._threadtmp, name)
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

        if self._usechg:
            # Dedicated directory for the chg server socket of this test.
            self._chgsockdir = os.path.join(self._threadtmp,
                                            b'%s.chgsock' % name)
            os.mkdir(self._chgsockdir)
831
840
    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                # Interrupt/exit must propagate, but record the abort so
                # stopTest() below can report it.
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                # A failed teardown invalidates an otherwise passing run.
                success = False

            if success:
                result.addSuccess(self)
        finally:
            # Always notify the result object, flagging user interruption.
            result.stopTest(self, interrupted=self._aborted)
886
895
    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        # Track the daemon pid file so tearDown() can kill leftovers.
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            # Negative return codes encode the killing signal.
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None: # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                # hghave reports which required features were missing.
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if ((ret != 0 or out != self._refout) and not self._skipped
                and not self._debug):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)

            # The result object handles diff calculation for us.
            with firstlock:
                if self._result.addOutputMismatch(self, ret, out, self._refout):
                    # change was accepted, skip failing
                    return
                if self._first:
                    global firsterror
                    firsterror = True

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))
955
964
    def tearDown(self):
        """Tasks to perform after run()."""
        # Kill any daemons the test left running.
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
                (self._testtmp.decode('utf-8'),
                 self._threadtmp.decode('utf-8')))
        else:
            try:
                shutil.rmtree(self._testtmp)
            except OSError:
                # unreadable directory may be left in $TESTTMP; fix permission
                # and try again
                makecleanable(self._testtmp)
                shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        # Persist the failing output so it can be inspected/re-diffed later.
        if ((self._ret != 0 or self._out != self._refout) and not self._skipped
            and not self._debug and self._out):
            with open(self.errpath, 'wb') as f:
                for line in self._out:
                    f.write(line)

        vlog("# Ret was:", self._ret, '(%s)' % self.name)
988
997
    def _run(self, env):
        # This should be implemented in child classes to run tests.
        raise unittest.SkipTest('unknown test type')
992
1001
    def abort(self):
        """Terminate execution of this test."""
        # Checked by run() and reported through stopTest(interrupted=...).
        self._aborted = True
996
1005
997 def _portmap(self, i):
1006 def _portmap(self, i):
998 offset = b'' if i == 0 else b'%d' % i
1007 offset = b'' if i == 0 else b'%d' % i
999 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1008 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1000
1009
    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            # Mask the local IP (preceded by a non-digit, so port numbers
            # and similar are not mangled) and transaction ids.
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        # Optional per-suite substitutions defined by the test directory.
        replacementfile = os.path.join(self._testdir, b'common-pattern.py')

        if os.path.exists(replacementfile):
            data = {}
            # NOTE: executes a file from the (trusted) test suite directory.
            with open(replacementfile, mode='rb') as source:
                # the intermediate 'compile' step help with debugging
                code = compile(source.read(), replacementfile, 'exec')
                exec(code, data)
            for value in data.get('substitutions', ()):
                if len(value) != 2:
                    msg = 'malformatted substitution in %s: %r'
                    msg %= (replacementfile, value)
                    raise ValueError(msg)
                r.append(value)
        return r
1033
1042
1034 def _escapepath(self, p):
1043 def _escapepath(self, p):
1035 if os.name == 'nt':
1044 if os.name == 'nt':
1036 return (
1045 return (
1037 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
1046 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
1038 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
1047 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
1039 for c in [p[i:i + 1] for i in range(len(p))]))
1048 for c in [p[i:i + 1] for i in range(len(p))]))
1040 )
1049 )
1041 else:
1050 else:
1042 return re.escape(p)
1051 return re.escape(p)
1043
1052
1044 def _localip(self):
1053 def _localip(self):
1045 if self._useipv6:
1054 if self._useipv6:
1046 return b'::1'
1055 return b'::1'
1047 else:
1056 else:
1048 return b'127.0.0.1'
1057 return b'127.0.0.1'
1049
1058
    def _genrestoreenv(self, testenv):
        """Generate a script that can be used by tests to restore the original
        environment."""
        # Put the restoreenv script inside self._threadtmp
        scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
        testenv['HGTEST_RESTOREENV'] = _strpath(scriptpath)

        # Only restore environment variable names that the shell allows
        # us to export.
        name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')

        # Do not restore these variables; otherwise tests would fail.
        reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}

        with open(scriptpath, 'w') as envf:
            # Re-export every original variable...
            for name, value in origenviron.items():
                if not name_regex.match(name):
                    # Skip environment variables with unusual names not
                    # allowed by most shells.
                    continue
                if name in reqnames:
                    continue
                envf.write('%s=%s\n' % (name, shellquote(value)))

            # ...and unset everything the test environment added.
            for name in testenv:
                if name in origenviron or name in reqnames:
                    continue
                envf.write('unset %s\n' % (name,))
1078
1087
1079 def _getenv(self):
1088 def _getenv(self):
1080 """Obtain environment variables to use during test execution."""
1089 """Obtain environment variables to use during test execution."""
1081 def defineport(i):
1090 def defineport(i):
1082 offset = '' if i == 0 else '%s' % i
1091 offset = '' if i == 0 else '%s' % i
1083 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1092 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1084 env = os.environ.copy()
1093 env = os.environ.copy()
1085 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1094 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1086 env['HGEMITWARNINGS'] = '1'
1095 env['HGEMITWARNINGS'] = '1'
1087 env['TESTTMP'] = _strpath(self._testtmp)
1096 env['TESTTMP'] = _strpath(self._testtmp)
1088 env['TESTNAME'] = self.name
1097 env['TESTNAME'] = self.name
1089 env['HOME'] = _strpath(self._testtmp)
1098 env['HOME'] = _strpath(self._testtmp)
1090 # This number should match portneeded in _getport
1099 # This number should match portneeded in _getport
1091 for port in xrange(3):
1100 for port in xrange(3):
1092 # This list should be parallel to _portmap in _getreplacements
1101 # This list should be parallel to _portmap in _getreplacements
1093 defineport(port)
1102 defineport(port)
1094 env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
1103 env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
1095 env["DAEMON_PIDS"] = _strpath(os.path.join(self._threadtmp,
1104 env["DAEMON_PIDS"] = _strpath(os.path.join(self._threadtmp,
1096 b'daemon.pids'))
1105 b'daemon.pids'))
1097 env["HGEDITOR"] = ('"' + sys.executable + '"'
1106 env["HGEDITOR"] = ('"' + sysexecutable + '"'
1098 + ' -c "import sys; sys.exit(0)"')
1107 + ' -c "import sys; sys.exit(0)"')
1099 env["HGUSER"] = "test"
1108 env["HGUSER"] = "test"
1100 env["HGENCODING"] = "ascii"
1109 env["HGENCODING"] = "ascii"
1101 env["HGENCODINGMODE"] = "strict"
1110 env["HGENCODINGMODE"] = "strict"
1102 env["HGHOSTNAME"] = "test-hostname"
1111 env["HGHOSTNAME"] = "test-hostname"
1103 env['HGIPV6'] = str(int(self._useipv6))
1112 env['HGIPV6'] = str(int(self._useipv6))
1104 # See contrib/catapipe.py for how to use this functionality.
1113 # See contrib/catapipe.py for how to use this functionality.
1105 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1114 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1106 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1115 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1107 # non-test one in as a default, otherwise set to devnull
1116 # non-test one in as a default, otherwise set to devnull
1108 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1117 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1109 'HGCATAPULTSERVERPIPE', os.devnull)
1118 'HGCATAPULTSERVERPIPE', os.devnull)
1110
1119
1111 extraextensions = []
1120 extraextensions = []
1112 for opt in self._extraconfigopts:
1121 for opt in self._extraconfigopts:
1113 section, key = opt.encode('utf-8').split(b'.', 1)
1122 section, key = opt.encode('utf-8').split(b'.', 1)
1114 if section != 'extensions':
1123 if section != 'extensions':
1115 continue
1124 continue
1116 name = key.split(b'=', 1)[0]
1125 name = key.split(b'=', 1)[0]
1117 extraextensions.append(name)
1126 extraextensions.append(name)
1118
1127
1119 if extraextensions:
1128 if extraextensions:
1120 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1129 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1121
1130
1122 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1131 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1123 # IP addresses.
1132 # IP addresses.
1124 env['LOCALIP'] = _strpath(self._localip())
1133 env['LOCALIP'] = _strpath(self._localip())
1125
1134
1126 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1135 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1127 # but this is needed for testing python instances like dummyssh,
1136 # but this is needed for testing python instances like dummyssh,
1128 # dummysmtpd.py, and dumbhttp.py.
1137 # dummysmtpd.py, and dumbhttp.py.
1129 if PYTHON3 and os.name == 'nt':
1138 if PYTHON3 and os.name == 'nt':
1130 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1139 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1131
1140
1132 # Reset some environment variables to well-known values so that
1141 # Reset some environment variables to well-known values so that
1133 # the tests produce repeatable output.
1142 # the tests produce repeatable output.
1134 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1143 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1135 env['TZ'] = 'GMT'
1144 env['TZ'] = 'GMT'
1136 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1145 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1137 env['COLUMNS'] = '80'
1146 env['COLUMNS'] = '80'
1138 env['TERM'] = 'xterm'
1147 env['TERM'] = 'xterm'
1139
1148
1140 dropped = [
1149 dropped = [
1141 'CDPATH',
1150 'CDPATH',
1142 'CHGDEBUG',
1151 'CHGDEBUG',
1143 'EDITOR',
1152 'EDITOR',
1144 'GREP_OPTIONS',
1153 'GREP_OPTIONS',
1145 'HG',
1154 'HG',
1146 'HGMERGE',
1155 'HGMERGE',
1147 'HGPLAIN',
1156 'HGPLAIN',
1148 'HGPLAINEXCEPT',
1157 'HGPLAINEXCEPT',
1149 'HGPROF',
1158 'HGPROF',
1150 'http_proxy',
1159 'http_proxy',
1151 'no_proxy',
1160 'no_proxy',
1152 'NO_PROXY',
1161 'NO_PROXY',
1153 'PAGER',
1162 'PAGER',
1154 'VISUAL',
1163 'VISUAL',
1155 ]
1164 ]
1156
1165
1157 for k in dropped:
1166 for k in dropped:
1158 if k in env:
1167 if k in env:
1159 del env[k]
1168 del env[k]
1160
1169
1161 # unset env related to hooks
1170 # unset env related to hooks
1162 for k in list(env):
1171 for k in list(env):
1163 if k.startswith('HG_'):
1172 if k.startswith('HG_'):
1164 del env[k]
1173 del env[k]
1165
1174
1166 if self._usechg:
1175 if self._usechg:
1167 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1176 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1168
1177
1169 return env
1178 return env
1170
1179
1171 def _createhgrc(self, path):
1180 def _createhgrc(self, path):
1172 """Create an hgrc file for this test."""
1181 """Create an hgrc file for this test."""
1173 with open(path, 'wb') as hgrc:
1182 with open(path, 'wb') as hgrc:
1174 hgrc.write(b'[ui]\n')
1183 hgrc.write(b'[ui]\n')
1175 hgrc.write(b'slash = True\n')
1184 hgrc.write(b'slash = True\n')
1176 hgrc.write(b'interactive = False\n')
1185 hgrc.write(b'interactive = False\n')
1177 hgrc.write(b'merge = internal:merge\n')
1186 hgrc.write(b'merge = internal:merge\n')
1178 hgrc.write(b'mergemarkers = detailed\n')
1187 hgrc.write(b'mergemarkers = detailed\n')
1179 hgrc.write(b'promptecho = True\n')
1188 hgrc.write(b'promptecho = True\n')
1180 hgrc.write(b'[defaults]\n')
1189 hgrc.write(b'[defaults]\n')
1181 hgrc.write(b'[devel]\n')
1190 hgrc.write(b'[devel]\n')
1182 hgrc.write(b'all-warnings = true\n')
1191 hgrc.write(b'all-warnings = true\n')
1183 hgrc.write(b'default-date = 0 0\n')
1192 hgrc.write(b'default-date = 0 0\n')
1184 hgrc.write(b'[largefiles]\n')
1193 hgrc.write(b'[largefiles]\n')
1185 hgrc.write(b'usercache = %s\n' %
1194 hgrc.write(b'usercache = %s\n' %
1186 (os.path.join(self._testtmp, b'.cache/largefiles')))
1195 (os.path.join(self._testtmp, b'.cache/largefiles')))
1187 hgrc.write(b'[lfs]\n')
1196 hgrc.write(b'[lfs]\n')
1188 hgrc.write(b'usercache = %s\n' %
1197 hgrc.write(b'usercache = %s\n' %
1189 (os.path.join(self._testtmp, b'.cache/lfs')))
1198 (os.path.join(self._testtmp, b'.cache/lfs')))
1190 hgrc.write(b'[web]\n')
1199 hgrc.write(b'[web]\n')
1191 hgrc.write(b'address = localhost\n')
1200 hgrc.write(b'address = localhost\n')
1192 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1201 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1193 hgrc.write(b'server-header = testing stub value\n')
1202 hgrc.write(b'server-header = testing stub value\n')
1194
1203
1195 for opt in self._extraconfigopts:
1204 for opt in self._extraconfigopts:
1196 section, key = opt.encode('utf-8').split(b'.', 1)
1205 section, key = opt.encode('utf-8').split(b'.', 1)
1197 assert b'=' in key, ('extra config opt %s must '
1206 assert b'=' in key, ('extra config opt %s must '
1198 'have an = for assignment' % opt)
1207 'have an = for assignment' % opt)
1199 hgrc.write(b'[%s]\n%s\n' % (section, key))
1208 hgrc.write(b'[%s]\n%s\n' % (section, key))
1200
1209
1201 def fail(self, msg):
1210 def fail(self, msg):
1202 # unittest differentiates between errored and failed.
1211 # unittest differentiates between errored and failed.
1203 # Failed is denoted by AssertionError (by default at least).
1212 # Failed is denoted by AssertionError (by default at least).
1204 raise AssertionError(msg)
1213 raise AssertionError(msg)
1205
1214
1206 def _runcommand(self, cmd, env, normalizenewlines=False):
1215 def _runcommand(self, cmd, env, normalizenewlines=False):
1207 """Run command in a sub-process, capturing the output (stdout and
1216 """Run command in a sub-process, capturing the output (stdout and
1208 stderr).
1217 stderr).
1209
1218
1210 Return a tuple (exitcode, output). output is None in debug mode.
1219 Return a tuple (exitcode, output). output is None in debug mode.
1211 """
1220 """
1212 if self._debug:
1221 if self._debug:
1213 proc = subprocess.Popen(_strpath(cmd), shell=True,
1222 proc = subprocess.Popen(_strpath(cmd), shell=True,
1214 cwd=_strpath(self._testtmp),
1223 cwd=_strpath(self._testtmp),
1215 env=env)
1224 env=env)
1216 ret = proc.wait()
1225 ret = proc.wait()
1217 return (ret, None)
1226 return (ret, None)
1218
1227
1219 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1228 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1220 def cleanup():
1229 def cleanup():
1221 terminate(proc)
1230 terminate(proc)
1222 ret = proc.wait()
1231 ret = proc.wait()
1223 if ret == 0:
1232 if ret == 0:
1224 ret = signal.SIGTERM << 8
1233 ret = signal.SIGTERM << 8
1225 killdaemons(env['DAEMON_PIDS'])
1234 killdaemons(env['DAEMON_PIDS'])
1226 return ret
1235 return ret
1227
1236
1228 proc.tochild.close()
1237 proc.tochild.close()
1229
1238
1230 try:
1239 try:
1231 output = proc.fromchild.read()
1240 output = proc.fromchild.read()
1232 except KeyboardInterrupt:
1241 except KeyboardInterrupt:
1233 vlog('# Handling keyboard interrupt')
1242 vlog('# Handling keyboard interrupt')
1234 cleanup()
1243 cleanup()
1235 raise
1244 raise
1236
1245
1237 ret = proc.wait()
1246 ret = proc.wait()
1238 if wifexited(ret):
1247 if wifexited(ret):
1239 ret = os.WEXITSTATUS(ret)
1248 ret = os.WEXITSTATUS(ret)
1240
1249
1241 if proc.timeout:
1250 if proc.timeout:
1242 ret = 'timeout'
1251 ret = 'timeout'
1243
1252
1244 if ret:
1253 if ret:
1245 killdaemons(env['DAEMON_PIDS'])
1254 killdaemons(env['DAEMON_PIDS'])
1246
1255
1247 for s, r in self._getreplacements():
1256 for s, r in self._getreplacements():
1248 output = re.sub(s, r, output)
1257 output = re.sub(s, r, output)
1249
1258
1250 if normalizenewlines:
1259 if normalizenewlines:
1251 output = output.replace(b'\r\n', b'\n')
1260 output = output.replace(b'\r\n', b'\n')
1252
1261
1253 return ret, output.splitlines(True)
1262 return ret, output.splitlines(True)
1254
1263
class PythonTest(Test):
    """A Python-based test."""

    @property
    def refpath(self):
        # Expected output lives alongside the test as "<name>.out".
        return os.path.join(self._testdir, b'%s.out' % self.bname)

    def _run(self, env):
        # Optionally pass -3 to surface Python 3 compatibility warnings.
        py3switch = b' -3' if self._py3warnings else b''
        # Quote the python(3) executable for Windows
        cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
        vlog("# Running", cmd)
        # Windows interpreters emit \r\n line endings; normalize them so
        # comparison against the .out reference works.
        normalizenewlines = os.name == 'nt'
        result = self._runcommand(cmd, env,
                                  normalizenewlines=normalizenewlines)
        if self._aborted:
            raise KeyboardInterrupt()

        return result
1274
1283
# Some (glob) annotations are only required in certain circumstances,
# so check-code should not insist on them where one of these patterns
# matches the expected-output line.
checkcodeglobpats = [
    # Windows paths would seem to make a backslash (glob) unnecessary,
    # but these really do need the annotation.
    re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
    re.compile(br'^moving \S+/.*[^)]$'),
    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
    # Loopback is usually 127.0.0.1 but not on every platform, so any
    # line mentioning $LOCALIP always gets globbed.
    re.compile(br'.*\$LOCALIP.*$'),
]
1288
1297
# bchr(i) -> single byte with ordinal i, as a bytes object.  On Python 2
# bytes is str, so chr() already does the job; Python 3 needs an explicit
# one-element bytes construction.
if PYTHON3:
    def bchr(x):
        return bytes([x])
else:
    bchr = chr
1292
1301
class TTest(Test):
    """A "t test" is a test backed by a .t file."""

    # Message prefixes recognized when interpreting hghave results --
    # presumably matched against test output elsewhere in this file.
    SKIPPED_PREFIX = b'skipped: '
    FAILED_PREFIX = b'hghave check failed: '

    # Matches control bytes (other than tab and newline) and non-ASCII
    # bytes, i.e. output that needs an (esc) escape.
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
    # Map every byte to its \xNN escape; backslash and \r get readable
    # special-case spellings.
    ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1303
1312
1304 def __init__(self, path, *args, **kwds):
1313 def __init__(self, path, *args, **kwds):
1305 # accept an extra "case" parameter
1314 # accept an extra "case" parameter
1306 case = kwds.pop('case', [])
1315 case = kwds.pop('case', [])
1307 self._case = case
1316 self._case = case
1308 self._allcases = {x for y in parsettestcases(path) for x in y}
1317 self._allcases = {x for y in parsettestcases(path) for x in y}
1309 super(TTest, self).__init__(path, *args, **kwds)
1318 super(TTest, self).__init__(path, *args, **kwds)
1310 if case:
1319 if case:
1311 casepath = b'#'.join(case)
1320 casepath = b'#'.join(case)
1312 self.name = '%s#%s' % (self.name, _strpath(casepath))
1321 self.name = '%s#%s' % (self.name, _strpath(casepath))
1313 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1322 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1314 self._tmpname += b'-%s' % casepath
1323 self._tmpname += b'-%s' % casepath
1315 self._have = {}
1324 self._have = {}
1316
1325
1317 @property
1326 @property
1318 def refpath(self):
1327 def refpath(self):
1319 return os.path.join(self._testdir, self.bname)
1328 return os.path.join(self._testdir, self.bname)
1320
1329
1321 def _run(self, env):
1330 def _run(self, env):
1322 with open(self.path, 'rb') as f:
1331 with open(self.path, 'rb') as f:
1323 lines = f.readlines()
1332 lines = f.readlines()
1324
1333
1325 # .t file is both reference output and the test input, keep reference
1334 # .t file is both reference output and the test input, keep reference
1326 # output updated with the the test input. This avoids some race
1335 # output updated with the the test input. This avoids some race
1327 # conditions where the reference output does not match the actual test.
1336 # conditions where the reference output does not match the actual test.
1328 if self._refout is not None:
1337 if self._refout is not None:
1329 self._refout = lines
1338 self._refout = lines
1330
1339
1331 salt, script, after, expected = self._parsetest(lines)
1340 salt, script, after, expected = self._parsetest(lines)
1332
1341
1333 # Write out the generated script.
1342 # Write out the generated script.
1334 fname = b'%s.sh' % self._testtmp
1343 fname = b'%s.sh' % self._testtmp
1335 with open(fname, 'wb') as f:
1344 with open(fname, 'wb') as f:
1336 for l in script:
1345 for l in script:
1337 f.write(l)
1346 f.write(l)
1338
1347
1339 cmd = b'%s "%s"' % (self._shell, fname)
1348 cmd = b'%s "%s"' % (self._shell, fname)
1340 vlog("# Running", cmd)
1349 vlog("# Running", cmd)
1341
1350
1342 exitcode, output = self._runcommand(cmd, env)
1351 exitcode, output = self._runcommand(cmd, env)
1343
1352
1344 if self._aborted:
1353 if self._aborted:
1345 raise KeyboardInterrupt()
1354 raise KeyboardInterrupt()
1346
1355
1347 # Do not merge output if skipped. Return hghave message instead.
1356 # Do not merge output if skipped. Return hghave message instead.
1348 # Similarly, with --debug, output is None.
1357 # Similarly, with --debug, output is None.
1349 if exitcode == self.SKIPPED_STATUS or output is None:
1358 if exitcode == self.SKIPPED_STATUS or output is None:
1350 return exitcode, output
1359 return exitcode, output
1351
1360
1352 return self._processoutput(exitcode, output, salt, after, expected)
1361 return self._processoutput(exitcode, output, salt, after, expected)
1353
1362
1354 def _hghave(self, reqs):
1363 def _hghave(self, reqs):
1355 allreqs = b' '.join(reqs)
1364 allreqs = b' '.join(reqs)
1356
1365
1357 self._detectslow(reqs)
1366 self._detectslow(reqs)
1358
1367
1359 if allreqs in self._have:
1368 if allreqs in self._have:
1360 return self._have.get(allreqs)
1369 return self._have.get(allreqs)
1361
1370
1362 # TODO do something smarter when all other uses of hghave are gone.
1371 # TODO do something smarter when all other uses of hghave are gone.
1363 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1372 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1364 tdir = runtestdir.replace(b'\\', b'/')
1373 tdir = runtestdir.replace(b'\\', b'/')
1365 proc = Popen4(b'%s -c "%s/hghave %s"' %
1374 proc = Popen4(b'%s -c "%s/hghave %s"' %
1366 (self._shell, tdir, allreqs),
1375 (self._shell, tdir, allreqs),
1367 self._testtmp, 0, self._getenv())
1376 self._testtmp, 0, self._getenv())
1368 stdout, stderr = proc.communicate()
1377 stdout, stderr = proc.communicate()
1369 ret = proc.wait()
1378 ret = proc.wait()
1370 if wifexited(ret):
1379 if wifexited(ret):
1371 ret = os.WEXITSTATUS(ret)
1380 ret = os.WEXITSTATUS(ret)
1372 if ret == 2:
1381 if ret == 2:
1373 print(stdout.decode('utf-8'))
1382 print(stdout.decode('utf-8'))
1374 sys.exit(1)
1383 sys.exit(1)
1375
1384
1376 if ret != 0:
1385 if ret != 0:
1377 self._have[allreqs] = (False, stdout)
1386 self._have[allreqs] = (False, stdout)
1378 return False, stdout
1387 return False, stdout
1379
1388
1380 self._have[allreqs] = (True, None)
1389 self._have[allreqs] = (True, None)
1381 return True, None
1390 return True, None
1382
1391
1383 def _detectslow(self, reqs):
1392 def _detectslow(self, reqs):
1384 """update the timeout of slow test when appropriate"""
1393 """update the timeout of slow test when appropriate"""
1385 if b'slow' in reqs:
1394 if b'slow' in reqs:
1386 self._timeout = self._slowtimeout
1395 self._timeout = self._slowtimeout
1387
1396
1388 def _iftest(self, args):
1397 def _iftest(self, args):
1389 # implements "#if"
1398 # implements "#if"
1390 reqs = []
1399 reqs = []
1391 for arg in args:
1400 for arg in args:
1392 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1401 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1393 if arg[3:] in self._case:
1402 if arg[3:] in self._case:
1394 return False
1403 return False
1395 elif arg in self._allcases:
1404 elif arg in self._allcases:
1396 if arg not in self._case:
1405 if arg not in self._case:
1397 return False
1406 return False
1398 else:
1407 else:
1399 reqs.append(arg)
1408 reqs.append(arg)
1400 self._detectslow(reqs)
1409 self._detectslow(reqs)
1401 return self._hghave(reqs)[0]
1410 return self._hghave(reqs)[0]
1402
1411
1403 def _parsetest(self, lines):
1412 def _parsetest(self, lines):
1404 # We generate a shell script which outputs unique markers to line
1413 # We generate a shell script which outputs unique markers to line
1405 # up script results with our source. These markers include input
1414 # up script results with our source. These markers include input
1406 # line number and the last return code.
1415 # line number and the last return code.
1407 salt = b"SALT%d" % time.time()
1416 salt = b"SALT%d" % time.time()
1408 def addsalt(line, inpython):
1417 def addsalt(line, inpython):
1409 if inpython:
1418 if inpython:
1410 script.append(b'%s %d 0\n' % (salt, line))
1419 script.append(b'%s %d 0\n' % (salt, line))
1411 else:
1420 else:
1412 script.append(b'echo %s %d $?\n' % (salt, line))
1421 script.append(b'echo %s %d $?\n' % (salt, line))
1413 activetrace = []
1422 activetrace = []
1414 session = str(uuid.uuid4())
1423 session = str(uuid.uuid4())
1415 if PYTHON3:
1424 if PYTHON3:
1416 session = session.encode('ascii')
1425 session = session.encode('ascii')
1417 hgcatapult = (os.getenv('HGTESTCATAPULTSERVERPIPE') or
1426 hgcatapult = (os.getenv('HGTESTCATAPULTSERVERPIPE') or
1418 os.getenv('HGCATAPULTSERVERPIPE'))
1427 os.getenv('HGCATAPULTSERVERPIPE'))
1419 def toggletrace(cmd=None):
1428 def toggletrace(cmd=None):
1420 if not hgcatapult or hgcatapult == os.devnull:
1429 if not hgcatapult or hgcatapult == os.devnull:
1421 return
1430 return
1422
1431
1423 if activetrace:
1432 if activetrace:
1424 script.append(
1433 script.append(
1425 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
1434 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
1426 session, activetrace[0]))
1435 session, activetrace[0]))
1427 if cmd is None:
1436 if cmd is None:
1428 return
1437 return
1429
1438
1430 if isinstance(cmd, str):
1439 if isinstance(cmd, str):
1431 quoted = shellquote(cmd.strip())
1440 quoted = shellquote(cmd.strip())
1432 else:
1441 else:
1433 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1442 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1434 quoted = quoted.replace(b'\\', b'\\\\')
1443 quoted = quoted.replace(b'\\', b'\\\\')
1435 script.append(
1444 script.append(
1436 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
1445 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n' % (
1437 session, quoted))
1446 session, quoted))
1438 activetrace[0:] = [quoted]
1447 activetrace[0:] = [quoted]
1439
1448
1440 script = []
1449 script = []
1441
1450
1442 # After we run the shell script, we re-unify the script output
1451 # After we run the shell script, we re-unify the script output
1443 # with non-active parts of the source, with synchronization by our
1452 # with non-active parts of the source, with synchronization by our
1444 # SALT line number markers. The after table contains the non-active
1453 # SALT line number markers. The after table contains the non-active
1445 # components, ordered by line number.
1454 # components, ordered by line number.
1446 after = {}
1455 after = {}
1447
1456
1448 # Expected shell script output.
1457 # Expected shell script output.
1449 expected = {}
1458 expected = {}
1450
1459
1451 pos = prepos = -1
1460 pos = prepos = -1
1452
1461
1453 # True or False when in a true or false conditional section
1462 # True or False when in a true or false conditional section
1454 skipping = None
1463 skipping = None
1455
1464
1456 # We keep track of whether or not we're in a Python block so we
1465 # We keep track of whether or not we're in a Python block so we
1457 # can generate the surrounding doctest magic.
1466 # can generate the surrounding doctest magic.
1458 inpython = False
1467 inpython = False
1459
1468
1460 if self._debug:
1469 if self._debug:
1461 script.append(b'set -x\n')
1470 script.append(b'set -x\n')
1462 if self._hgcommand != b'hg':
1471 if self._hgcommand != b'hg':
1463 script.append(b'alias hg="%s"\n' % self._hgcommand)
1472 script.append(b'alias hg="%s"\n' % self._hgcommand)
1464 if os.getenv('MSYSTEM'):
1473 if os.getenv('MSYSTEM'):
1465 script.append(b'alias pwd="pwd -W"\n')
1474 script.append(b'alias pwd="pwd -W"\n')
1466
1475
1467 if hgcatapult and hgcatapult != os.devnull:
1476 if hgcatapult and hgcatapult != os.devnull:
1468 # Kludge: use a while loop to keep the pipe from getting
1477 # Kludge: use a while loop to keep the pipe from getting
1469 # closed by our echo commands. The still-running file gets
1478 # closed by our echo commands. The still-running file gets
1470 # reaped at the end of the script, which causes the while
1479 # reaped at the end of the script, which causes the while
1471 # loop to exit and closes the pipe. Sigh.
1480 # loop to exit and closes the pipe. Sigh.
1472 script.append(
1481 script.append(
1473 b'rtendtracing() {\n'
1482 b'rtendtracing() {\n'
1474 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1483 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1475 b' rm -f "$TESTTMP/.still-running"\n'
1484 b' rm -f "$TESTTMP/.still-running"\n'
1476 b'}\n'
1485 b'}\n'
1477 b'trap "rtendtracing" 0\n'
1486 b'trap "rtendtracing" 0\n'
1478 b'touch "$TESTTMP/.still-running"\n'
1487 b'touch "$TESTTMP/.still-running"\n'
1479 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1488 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1480 b'> %(catapult)s &\n'
1489 b'> %(catapult)s &\n'
1481 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1490 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1482 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1491 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1483 % {
1492 % {
1484 'name': self.name,
1493 'name': self.name,
1485 'session': session,
1494 'session': session,
1486 'catapult': hgcatapult,
1495 'catapult': hgcatapult,
1487 }
1496 }
1488 )
1497 )
1489
1498
1490 if self._case:
1499 if self._case:
1491 casestr = b'#'.join(self._case)
1500 casestr = b'#'.join(self._case)
1492 if isinstance(self._case, str):
1501 if isinstance(self._case, str):
1493 quoted = shellquote(casestr)
1502 quoted = shellquote(casestr)
1494 else:
1503 else:
1495 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1504 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1496 script.append(b'TESTCASE=%s\n' % quoted)
1505 script.append(b'TESTCASE=%s\n' % quoted)
1497 script.append(b'export TESTCASE\n')
1506 script.append(b'export TESTCASE\n')
1498
1507
1499 n = 0
1508 n = 0
1500 for n, l in enumerate(lines):
1509 for n, l in enumerate(lines):
1501 if not l.endswith(b'\n'):
1510 if not l.endswith(b'\n'):
1502 l += b'\n'
1511 l += b'\n'
1503 if l.startswith(b'#require'):
1512 if l.startswith(b'#require'):
1504 lsplit = l.split()
1513 lsplit = l.split()
1505 if len(lsplit) < 2 or lsplit[0] != b'#require':
1514 if len(lsplit) < 2 or lsplit[0] != b'#require':
1506 after.setdefault(pos, []).append(' !!! invalid #require\n')
1515 after.setdefault(pos, []).append(' !!! invalid #require\n')
1507 if not skipping:
1516 if not skipping:
1508 haveresult, message = self._hghave(lsplit[1:])
1517 haveresult, message = self._hghave(lsplit[1:])
1509 if not haveresult:
1518 if not haveresult:
1510 script = [b'echo "%s"\nexit 80\n' % message]
1519 script = [b'echo "%s"\nexit 80\n' % message]
1511 break
1520 break
1512 after.setdefault(pos, []).append(l)
1521 after.setdefault(pos, []).append(l)
1513 elif l.startswith(b'#if'):
1522 elif l.startswith(b'#if'):
1514 lsplit = l.split()
1523 lsplit = l.split()
1515 if len(lsplit) < 2 or lsplit[0] != b'#if':
1524 if len(lsplit) < 2 or lsplit[0] != b'#if':
1516 after.setdefault(pos, []).append(' !!! invalid #if\n')
1525 after.setdefault(pos, []).append(' !!! invalid #if\n')
1517 if skipping is not None:
1526 if skipping is not None:
1518 after.setdefault(pos, []).append(' !!! nested #if\n')
1527 after.setdefault(pos, []).append(' !!! nested #if\n')
1519 skipping = not self._iftest(lsplit[1:])
1528 skipping = not self._iftest(lsplit[1:])
1520 after.setdefault(pos, []).append(l)
1529 after.setdefault(pos, []).append(l)
1521 elif l.startswith(b'#else'):
1530 elif l.startswith(b'#else'):
1522 if skipping is None:
1531 if skipping is None:
1523 after.setdefault(pos, []).append(' !!! missing #if\n')
1532 after.setdefault(pos, []).append(' !!! missing #if\n')
1524 skipping = not skipping
1533 skipping = not skipping
1525 after.setdefault(pos, []).append(l)
1534 after.setdefault(pos, []).append(l)
1526 elif l.startswith(b'#endif'):
1535 elif l.startswith(b'#endif'):
1527 if skipping is None:
1536 if skipping is None:
1528 after.setdefault(pos, []).append(' !!! missing #if\n')
1537 after.setdefault(pos, []).append(' !!! missing #if\n')
1529 skipping = None
1538 skipping = None
1530 after.setdefault(pos, []).append(l)
1539 after.setdefault(pos, []).append(l)
1531 elif skipping:
1540 elif skipping:
1532 after.setdefault(pos, []).append(l)
1541 after.setdefault(pos, []).append(l)
1533 elif l.startswith(b' >>> '): # python inlines
1542 elif l.startswith(b' >>> '): # python inlines
1534 after.setdefault(pos, []).append(l)
1543 after.setdefault(pos, []).append(l)
1535 prepos = pos
1544 prepos = pos
1536 pos = n
1545 pos = n
1537 if not inpython:
1546 if not inpython:
1538 # We've just entered a Python block. Add the header.
1547 # We've just entered a Python block. Add the header.
1539 inpython = True
1548 inpython = True
1540 addsalt(prepos, False) # Make sure we report the exit code.
1549 addsalt(prepos, False) # Make sure we report the exit code.
1541 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1550 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1542 addsalt(n, True)
1551 addsalt(n, True)
1543 script.append(l[2:])
1552 script.append(l[2:])
1544 elif l.startswith(b' ... '): # python inlines
1553 elif l.startswith(b' ... '): # python inlines
1545 after.setdefault(prepos, []).append(l)
1554 after.setdefault(prepos, []).append(l)
1546 script.append(l[2:])
1555 script.append(l[2:])
1547 elif l.startswith(b' $ '): # commands
1556 elif l.startswith(b' $ '): # commands
1548 if inpython:
1557 if inpython:
1549 script.append(b'EOF\n')
1558 script.append(b'EOF\n')
1550 inpython = False
1559 inpython = False
1551 after.setdefault(pos, []).append(l)
1560 after.setdefault(pos, []).append(l)
1552 prepos = pos
1561 prepos = pos
1553 pos = n
1562 pos = n
1554 addsalt(n, False)
1563 addsalt(n, False)
1555 rawcmd = l[4:]
1564 rawcmd = l[4:]
1556 cmd = rawcmd.split()
1565 cmd = rawcmd.split()
1557 toggletrace(rawcmd)
1566 toggletrace(rawcmd)
1558 if len(cmd) == 2 and cmd[0] == b'cd':
1567 if len(cmd) == 2 and cmd[0] == b'cd':
1559 l = b' $ cd %s || exit 1\n' % cmd[1]
1568 l = b' $ cd %s || exit 1\n' % cmd[1]
1560 script.append(rawcmd)
1569 script.append(rawcmd)
1561 elif l.startswith(b' > '): # continuations
1570 elif l.startswith(b' > '): # continuations
1562 after.setdefault(prepos, []).append(l)
1571 after.setdefault(prepos, []).append(l)
1563 script.append(l[4:])
1572 script.append(l[4:])
1564 elif l.startswith(b' '): # results
1573 elif l.startswith(b' '): # results
1565 # Queue up a list of expected results.
1574 # Queue up a list of expected results.
1566 expected.setdefault(pos, []).append(l[2:])
1575 expected.setdefault(pos, []).append(l[2:])
1567 else:
1576 else:
1568 if inpython:
1577 if inpython:
1569 script.append(b'EOF\n')
1578 script.append(b'EOF\n')
1570 inpython = False
1579 inpython = False
1571 # Non-command/result. Queue up for merged output.
1580 # Non-command/result. Queue up for merged output.
1572 after.setdefault(pos, []).append(l)
1581 after.setdefault(pos, []).append(l)
1573
1582
1574 if inpython:
1583 if inpython:
1575 script.append(b'EOF\n')
1584 script.append(b'EOF\n')
1576 if skipping is not None:
1585 if skipping is not None:
1577 after.setdefault(pos, []).append(' !!! missing #endif\n')
1586 after.setdefault(pos, []).append(' !!! missing #endif\n')
1578 addsalt(n + 1, False)
1587 addsalt(n + 1, False)
1579 # Need to end any current per-command trace
1588 # Need to end any current per-command trace
1580 if activetrace:
1589 if activetrace:
1581 toggletrace()
1590 toggletrace()
1582 return salt, script, after, expected
1591 return salt, script, after, expected
1583
1592
1584 def _processoutput(self, exitcode, output, salt, after, expected):
1593 def _processoutput(self, exitcode, output, salt, after, expected):
1585 # Merge the script output back into a unified test.
1594 # Merge the script output back into a unified test.
1586 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1595 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1587 if exitcode != 0:
1596 if exitcode != 0:
1588 warnonly = 3
1597 warnonly = 3
1589
1598
1590 pos = -1
1599 pos = -1
1591 postout = []
1600 postout = []
1592 for l in output:
1601 for l in output:
1593 lout, lcmd = l, None
1602 lout, lcmd = l, None
1594 if salt in l:
1603 if salt in l:
1595 lout, lcmd = l.split(salt, 1)
1604 lout, lcmd = l.split(salt, 1)
1596
1605
1597 while lout:
1606 while lout:
1598 if not lout.endswith(b'\n'):
1607 if not lout.endswith(b'\n'):
1599 lout += b' (no-eol)\n'
1608 lout += b' (no-eol)\n'
1600
1609
1601 # Find the expected output at the current position.
1610 # Find the expected output at the current position.
1602 els = [None]
1611 els = [None]
1603 if expected.get(pos, None):
1612 if expected.get(pos, None):
1604 els = expected[pos]
1613 els = expected[pos]
1605
1614
1606 optional = []
1615 optional = []
1607 for i, el in enumerate(els):
1616 for i, el in enumerate(els):
1608 r = False
1617 r = False
1609 if el:
1618 if el:
1610 r, exact = self.linematch(el, lout)
1619 r, exact = self.linematch(el, lout)
1611 if isinstance(r, str):
1620 if isinstance(r, str):
1612 if r == '-glob':
1621 if r == '-glob':
1613 lout = ''.join(el.rsplit(' (glob)', 1))
1622 lout = ''.join(el.rsplit(' (glob)', 1))
1614 r = '' # Warn only this line.
1623 r = '' # Warn only this line.
1615 elif r == "retry":
1624 elif r == "retry":
1616 postout.append(b' ' + el)
1625 postout.append(b' ' + el)
1617 else:
1626 else:
1618 log('\ninfo, unknown linematch result: %r\n' % r)
1627 log('\ninfo, unknown linematch result: %r\n' % r)
1619 r = False
1628 r = False
1620 if r:
1629 if r:
1621 els.pop(i)
1630 els.pop(i)
1622 break
1631 break
1623 if el:
1632 if el:
1624 if el.endswith(b" (?)\n"):
1633 if el.endswith(b" (?)\n"):
1625 optional.append(i)
1634 optional.append(i)
1626 else:
1635 else:
1627 m = optline.match(el)
1636 m = optline.match(el)
1628 if m:
1637 if m:
1629 conditions = [
1638 conditions = [
1630 c for c in m.group(2).split(b' ')]
1639 c for c in m.group(2).split(b' ')]
1631
1640
1632 if not self._iftest(conditions):
1641 if not self._iftest(conditions):
1633 optional.append(i)
1642 optional.append(i)
1634 if exact:
1643 if exact:
1635 # Don't allow line to be matches against a later
1644 # Don't allow line to be matches against a later
1636 # line in the output
1645 # line in the output
1637 els.pop(i)
1646 els.pop(i)
1638 break
1647 break
1639
1648
1640 if r:
1649 if r:
1641 if r == "retry":
1650 if r == "retry":
1642 continue
1651 continue
1643 # clean up any optional leftovers
1652 # clean up any optional leftovers
1644 for i in optional:
1653 for i in optional:
1645 postout.append(b' ' + els[i])
1654 postout.append(b' ' + els[i])
1646 for i in reversed(optional):
1655 for i in reversed(optional):
1647 del els[i]
1656 del els[i]
1648 postout.append(b' ' + el)
1657 postout.append(b' ' + el)
1649 else:
1658 else:
1650 if self.NEEDESCAPE(lout):
1659 if self.NEEDESCAPE(lout):
1651 lout = TTest._stringescape(b'%s (esc)\n' %
1660 lout = TTest._stringescape(b'%s (esc)\n' %
1652 lout.rstrip(b'\n'))
1661 lout.rstrip(b'\n'))
1653 postout.append(b' ' + lout) # Let diff deal with it.
1662 postout.append(b' ' + lout) # Let diff deal with it.
1654 if r != '': # If line failed.
1663 if r != '': # If line failed.
1655 warnonly = 3 # for sure not
1664 warnonly = 3 # for sure not
1656 elif warnonly == 1: # Is "not yet" and line is warn only.
1665 elif warnonly == 1: # Is "not yet" and line is warn only.
1657 warnonly = 2 # Yes do warn.
1666 warnonly = 2 # Yes do warn.
1658 break
1667 break
1659 else:
1668 else:
1660 # clean up any optional leftovers
1669 # clean up any optional leftovers
1661 while expected.get(pos, None):
1670 while expected.get(pos, None):
1662 el = expected[pos].pop(0)
1671 el = expected[pos].pop(0)
1663 if el:
1672 if el:
1664 if not el.endswith(b" (?)\n"):
1673 if not el.endswith(b" (?)\n"):
1665 m = optline.match(el)
1674 m = optline.match(el)
1666 if m:
1675 if m:
1667 conditions = [c for c in m.group(2).split(b' ')]
1676 conditions = [c for c in m.group(2).split(b' ')]
1668
1677
1669 if self._iftest(conditions):
1678 if self._iftest(conditions):
1670 # Don't append as optional line
1679 # Don't append as optional line
1671 continue
1680 continue
1672 else:
1681 else:
1673 continue
1682 continue
1674 postout.append(b' ' + el)
1683 postout.append(b' ' + el)
1675
1684
1676 if lcmd:
1685 if lcmd:
1677 # Add on last return code.
1686 # Add on last return code.
1678 ret = int(lcmd.split()[1])
1687 ret = int(lcmd.split()[1])
1679 if ret != 0:
1688 if ret != 0:
1680 postout.append(b' [%d]\n' % ret)
1689 postout.append(b' [%d]\n' % ret)
1681 if pos in after:
1690 if pos in after:
1682 # Merge in non-active test bits.
1691 # Merge in non-active test bits.
1683 postout += after.pop(pos)
1692 postout += after.pop(pos)
1684 pos = int(lcmd.split()[0])
1693 pos = int(lcmd.split()[0])
1685
1694
1686 if pos in after:
1695 if pos in after:
1687 postout += after.pop(pos)
1696 postout += after.pop(pos)
1688
1697
1689 if warnonly == 2:
1698 if warnonly == 2:
1690 exitcode = False # Set exitcode to warned.
1699 exitcode = False # Set exitcode to warned.
1691
1700
1692 return exitcode, postout
1701 return exitcode, postout
1693
1702
1694 @staticmethod
1703 @staticmethod
1695 def rematch(el, l):
1704 def rematch(el, l):
1696 try:
1705 try:
1697 el = b'(?:' + el + b')'
1706 el = b'(?:' + el + b')'
1698 # use \Z to ensure that the regex matches to the end of the string
1707 # use \Z to ensure that the regex matches to the end of the string
1699 if os.name == 'nt':
1708 if os.name == 'nt':
1700 return re.match(el + br'\r?\n\Z', l)
1709 return re.match(el + br'\r?\n\Z', l)
1701 return re.match(el + br'\n\Z', l)
1710 return re.match(el + br'\n\Z', l)
1702 except re.error:
1711 except re.error:
1703 # el is an invalid regex
1712 # el is an invalid regex
1704 return False
1713 return False
1705
1714
1706 @staticmethod
1715 @staticmethod
1707 def globmatch(el, l):
1716 def globmatch(el, l):
1708 # The only supported special characters are * and ? plus / which also
1717 # The only supported special characters are * and ? plus / which also
1709 # matches \ on windows. Escaping of these characters is supported.
1718 # matches \ on windows. Escaping of these characters is supported.
1710 if el + b'\n' == l:
1719 if el + b'\n' == l:
1711 if os.altsep:
1720 if os.altsep:
1712 # matching on "/" is not needed for this line
1721 # matching on "/" is not needed for this line
1713 for pat in checkcodeglobpats:
1722 for pat in checkcodeglobpats:
1714 if pat.match(el):
1723 if pat.match(el):
1715 return True
1724 return True
1716 return b'-glob'
1725 return b'-glob'
1717 return True
1726 return True
1718 el = el.replace(b'$LOCALIP', b'*')
1727 el = el.replace(b'$LOCALIP', b'*')
1719 i, n = 0, len(el)
1728 i, n = 0, len(el)
1720 res = b''
1729 res = b''
1721 while i < n:
1730 while i < n:
1722 c = el[i:i + 1]
1731 c = el[i:i + 1]
1723 i += 1
1732 i += 1
1724 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1733 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1725 res += el[i - 1:i + 1]
1734 res += el[i - 1:i + 1]
1726 i += 1
1735 i += 1
1727 elif c == b'*':
1736 elif c == b'*':
1728 res += b'.*'
1737 res += b'.*'
1729 elif c == b'?':
1738 elif c == b'?':
1730 res += b'.'
1739 res += b'.'
1731 elif c == b'/' and os.altsep:
1740 elif c == b'/' and os.altsep:
1732 res += b'[/\\\\]'
1741 res += b'[/\\\\]'
1733 else:
1742 else:
1734 res += re.escape(c)
1743 res += re.escape(c)
1735 return TTest.rematch(res, l)
1744 return TTest.rematch(res, l)
1736
1745
    def linematch(self, el, l):
        """Match expected line *el* against actual output line *l*.

        Returns a pair (r, exact):
        * r is truthy on a match -- True, a regex match object, or the
          string "retry" for optional lines that may match later output;
          falsy otherwise
        * exact is True only when the comparison was literal rather than
          via a (re)/(glob) pattern
        """
        if el == l: # perfect match (fast)
            return True, True
        retry = False
        if el.endswith(b" (?)\n"):
            # Optional line: strip the marker and remember that a miss
            # should be retried rather than reported.
            retry = "retry"
            el = el[:-5] + b"\n"
        else:
            m = optline.match(el)
            if m:
                conditions = [c for c in m.group(2).split(b' ')]

                el = m.group(1) + b"\n"
                if not self._iftest(conditions):
                    # listed feature missing, should not match
                    return "retry", False

        if el.endswith(b" (esc)\n"):
            # Undo backslash escaping before comparing.
            if PYTHON3:
                el = el[:-7].decode('unicode_escape') + '\n'
                el = el.encode('utf-8')
            else:
                el = el[:-7].decode('string-escape') + '\n'
        # Accept a trailing \r\n on Windows as equivalent to \n.
        if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
            return True, True
        if el.endswith(b" (re)\n"):
            return (TTest.rematch(el[:-6], l) or retry), False
        if el.endswith(b" (glob)\n"):
            # ignore '(glob)' added to l by 'replacements'
            if l.endswith(b" (glob)\n"):
                l = l[:-8] + b"\n"
            return (TTest.globmatch(el[:-8], l) or retry), False
        if os.altsep:
            # Last chance: compare with backslashes normalized to slashes.
            _l = l.replace(b'\\', b'/')
            if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
                return True, True
        return retry, True
1774
1783
1775 @staticmethod
1784 @staticmethod
1776 def parsehghaveoutput(lines):
1785 def parsehghaveoutput(lines):
1777 '''Parse hghave log lines.
1786 '''Parse hghave log lines.
1778
1787
1779 Return tuple of lists (missing, failed):
1788 Return tuple of lists (missing, failed):
1780 * the missing/unknown features
1789 * the missing/unknown features
1781 * the features for which existence check failed'''
1790 * the features for which existence check failed'''
1782 missing = []
1791 missing = []
1783 failed = []
1792 failed = []
1784 for line in lines:
1793 for line in lines:
1785 if line.startswith(TTest.SKIPPED_PREFIX):
1794 if line.startswith(TTest.SKIPPED_PREFIX):
1786 line = line.splitlines()[0]
1795 line = line.splitlines()[0]
1787 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1796 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1788 elif line.startswith(TTest.FAILED_PREFIX):
1797 elif line.startswith(TTest.FAILED_PREFIX):
1789 line = line.splitlines()[0]
1798 line = line.splitlines()[0]
1790 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1799 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1791
1800
1792 return missing, failed
1801 return missing, failed
1793
1802
    @staticmethod
    def _escapef(m):
        # re.sub callback: map one matched character to its escaped form
        # via the class-level ESCAPEMAP table.
        return TTest.ESCAPEMAP[m.group(0)]
1797
1806
    @staticmethod
    def _stringescape(s):
        # Escape every character matched by ESCAPESUB in s, using the
        # _escapef callback above.
        return TTest.ESCAPESUB(TTest._escapef, s)
1801
1810
# Serializes writes to the shared output stream across test threads
# (held around every stream.write in TestResult below).
iolock = threading.RLock()
# presumably guards first-failure handling for --first; its acquisition
# site is outside this view -- TODO confirm
firstlock = threading.RLock()
# Set once a failure has been reported; later mismatches check it to
# avoid printing stale diffs (see TestResult.addOutputMismatch).
firsterror = False
1805
1814
class TestResult(unittest._TextTestResult):
    """Holds results when executing via unittest.

    Extends the stock result with skip/ignore polyfills, per-test timing,
    and xunit bookkeeping.  All terminal output is serialized via the
    module-level iolock.
    """
    # Don't worry too much about accessing the non-public _TextTestResult.
    # It is relatively common in Python testing tools.
    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        # (name, user, sys, real, start, end) tuples, see stopTest().
        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

        if options.color == 'auto':
            self.color = pygmentspresent and self.stream.isatty()
        elif options.color == 'never':
            self.color = False
        else: # 'always', for testing purposes
            self.color = pygmentspresent

    def onStart(self, test):
        """ Can be overriden by custom TestResult
        """

    def onEnd(self):
        """ Can be overriden by custom TestResult
        """

    def addFailure(self, test, reason):
        # Record the failure; with --first stop the whole run, otherwise
        # emit the progress marker ('t' for timeout, '!' otherwise).
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
        else:
            with iolock:
                if reason == "timed out":
                    self.stream.write('t')
                else:
                    if not self._options.nodiff:
                        self.stream.write('\n')
                        # Exclude the '\n' from highlighting to lex correctly
                        formatted = 'ERROR: %s output changed\n' % test
                        self.stream.write(highlightmsg(formatted, self.color))
                    self.stream.write('!')

                self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()

    def addIgnore(self, test, reason):
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    # Filtered-out tests still count toward the run total.
                    self.testsRun += 1
                self.stream.flush()

    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test.

        Depending on options, shows the diff, opens an external viewer, or
        interactively offers to accept the new output.  Returns True when
        the change was accepted.
        """
        if self.shouldStop or firsterror:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                v = self._options.view
                subprocess.call(r'"%s" "%s" "%s"' %
                                (v, _strpath(test.refpath),
                                 _strpath(test.errpath)), shell=True)
            else:
                servefail, lines = getdiff(expected, got,
                                           test.refpath, test.errpath)
                self.stream.write('\n')
                for line in lines:
                    line = highlightdiff(line, self.color)
                    if PYTHON3:
                        # Diff lines are bytes; bypass the text wrapper.
                        self.stream.flush()
                        self.stream.buffer.write(line)
                        self.stream.buffer.flush()
                    else:
                        self.stream.write(line)
                        self.stream.flush()

                if servefail:
                    raise test.failureException(
                        'server failed to start (HGPORT=%s)' % test._startport)

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                if test.readrefout() != expected:
                    self.stream.write(
                        'Reference output has changed (run again to prompt '
                        'changes)')
                else:
                    self.stream.write('Accept this change? [n] ')
                    self.stream.flush()
                    answer = sys.stdin.readline().strip()
                    if answer.lower() in ('y', 'yes'):
                        if test.path.endswith(b'.t'):
                            rename(test.errpath, test.path)
                        else:
                            rename(test.errpath, '%s.out' % test.path)
                        accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(lines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times module computes the user time and system time spent by
        # child's processes along with real elapsed time taken by a process.
        # This module has one limitation. It can only work for Linux user
        # and not for Windows.
        test.started = os.times()
        if self._firststarttime is None: # thread racy but irrelevant
            self._firststarttime = test.started[4]

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped = os.times()

        starttime = test.started
        endtime = test.stopped
        origin = self._firststarttime
        self.times.append((test.name,
                           endtime[2] - starttime[2], # user space CPU time
                           endtime[3] - starttime[3], # sys  space CPU time
                           endtime[4] - starttime[4], # real time
                           starttime[4] - origin, # start date in run context
                           endtime[4] - origin, # end date in run context
                           ))

        if interrupted:
            with iolock:
                self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
                    test.name, self.times[-1][3]))
1985
1994
def getTestResult():
    """
    Returns the relevant test result
    """
    modulename = os.environ.get("CUSTOM_TEST_RESULT")
    if modulename is None:
        return TestResult
    # A site-specific TestResult subclass can be injected by naming its
    # module in the CUSTOM_TEST_RESULT environment variable.
    testresultmodule = __import__(modulename)
    return testresultmodule.TestResult
1995
2004
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                 retest=False, keywords=None, loop=False, runs_per_test=1,
                 loadtest=None, showchannels=False,
                 *args, **kwargs):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        # NOTE(review): testdir is accepted but not stored here --
        # presumably consumed by callers; confirm before removing.
        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:
            def get():
                # Hand out a fresh copy of the test when it asked to be
                # reloaded (e.g. for multiple runs per test).
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test, num_tests[0])
                return test
            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            # Whitelisted tests bypass the blacklist/retest/keyword filters.
            if not (self._whitelist and test.bname in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    with open(test.path, 'rb') as f:
                        t = f.read().lower() + test.bname.lower()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        # One slot per job; each running test claims a slot and publishes
        # its name there for the --showchannels status thread.
        channels = [""] * self._jobs

        def job(test, result):
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            else:
                raise ValueError('Could not find output channel')
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except: # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            finally:
                try:
                    channels[channel] = ''
                except IndexError:
                    pass

        def stat():
            # Status thread for --showchannels: prints one character per
            # channel roughly every second until channels is cleared.
            count = 0
            while channels:
                d = '\n%03s ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + ' ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                # Drain the completion queue when all job slots are busy or
                # there is nothing left to start.
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(
                                self._loadtest(test, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(target=job, name=test.name,
                                             args=(test, result))
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        # Emptying channels also terminates the stat() thread's loop.
        channels = []

        return result
2181
2190
2182 # Save the most recent 5 wall-clock runtimes of each test to a
2191 # Save the most recent 5 wall-clock runtimes of each test to a
2183 # human-readable text file named .testtimes. Tests are sorted
2192 # human-readable text file named .testtimes. Tests are sorted
2184 # alphabetically, while times for each test are listed from oldest to
2193 # alphabetically, while times for each test are listed from oldest to
2185 # newest.
2194 # newest.
2186
2195
def loadtimes(outputdir):
    """Load recorded per-test wall-clock times from ``.testtimes``.

    Returns a list of ``(testname, [duration, ...])`` pairs, durations
    ordered oldest to newest.  A missing file yields an empty list.
    Lines that do not parse (e.g. blank or hand-edited entries) are
    skipped instead of raising AttributeError on the failed match.
    """
    times = []
    try:
        with open(os.path.join(outputdir, b'.testtimes')) as fp:
            for line in fp:
                m = re.match('(.*?) ([0-9. ]+)', line)
                if not m:
                    # Tolerate malformed or truncated entries; the file is
                    # advisory cache data, not authoritative.
                    continue
                times.append((m.group(1),
                              [float(t) for t in m.group(2).split()]))
    except IOError as err:
        # No saved times yet is normal; re-raise anything else.
        if err.errno != errno.ENOENT:
            raise
    return times
2199
2208
def savetimes(outputdir, result):
    """Persist the wall-clock duration of each non-skipped test.

    Merges the runtimes recorded in *result* into ``.testtimes`` in
    *outputdir*, keeping at most the five most recent samples per test.
    The file is replaced via a temporary file; a failed swap is ignored.
    """
    maxruns = 5
    saved = dict(loadtimes(outputdir))
    skipped = set([str(entry[0]) for entry in result.skipped])
    for tdata in result.times:
        name, real = tdata[0], tdata[3]
        if name in skipped:
            continue
        samples = saved.setdefault(name, [])
        samples.append(real)
        # Keep only the most recent runs, in place.
        samples[:] = samples[-maxruns:]

    fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
                                   dir=outputdir, text=True)
    with os.fdopen(fd, 'w') as fp:
        for name, samples in sorted(saved.items()):
            durations = ' '.join(['%.3f' % (t,) for t in samples])
            fp.write('%s %s\n' % (name, durations))
    timepath = os.path.join(outputdir, b'.testtimes')
    # Unlink first for platforms where rename won't overwrite; best-effort.
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass
2225
2234
2226 class TextTestRunner(unittest.TextTestRunner):
2235 class TextTestRunner(unittest.TextTestRunner):
2227 """Custom unittest test runner that uses appropriate settings."""
2236 """Custom unittest test runner that uses appropriate settings."""
2228
2237
2229 def __init__(self, runner, *args, **kwargs):
2238 def __init__(self, runner, *args, **kwargs):
2230 super(TextTestRunner, self).__init__(*args, **kwargs)
2239 super(TextTestRunner, self).__init__(*args, **kwargs)
2231
2240
2232 self._runner = runner
2241 self._runner = runner
2233
2242
2234 self._result = getTestResult()(self._runner.options, self.stream,
2243 self._result = getTestResult()(self._runner.options, self.stream,
2235 self.descriptions, self.verbosity)
2244 self.descriptions, self.verbosity)
2236
2245
2237 def listtests(self, test):
2246 def listtests(self, test):
2238 test = sorted(test, key=lambda t: t.name)
2247 test = sorted(test, key=lambda t: t.name)
2239
2248
2240 self._result.onStart(test)
2249 self._result.onStart(test)
2241
2250
2242 for t in test:
2251 for t in test:
2243 print(t.name)
2252 print(t.name)
2244 self._result.addSuccess(t)
2253 self._result.addSuccess(t)
2245
2254
2246 if self._runner.options.xunit:
2255 if self._runner.options.xunit:
2247 with open(self._runner.options.xunit, "wb") as xuf:
2256 with open(self._runner.options.xunit, "wb") as xuf:
2248 self._writexunit(self._result, xuf)
2257 self._writexunit(self._result, xuf)
2249
2258
2250 if self._runner.options.json:
2259 if self._runner.options.json:
2251 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2260 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2252 with open(jsonpath, 'w') as fp:
2261 with open(jsonpath, 'w') as fp:
2253 self._writejson(self._result, fp)
2262 self._writejson(self._result, fp)
2254
2263
2255 return self._result
2264 return self._result
2256
2265
2257 def run(self, test):
2266 def run(self, test):
2258 self._result.onStart(test)
2267 self._result.onStart(test)
2259 test(self._result)
2268 test(self._result)
2260
2269
2261 failed = len(self._result.failures)
2270 failed = len(self._result.failures)
2262 skipped = len(self._result.skipped)
2271 skipped = len(self._result.skipped)
2263 ignored = len(self._result.ignored)
2272 ignored = len(self._result.ignored)
2264
2273
2265 with iolock:
2274 with iolock:
2266 self.stream.writeln('')
2275 self.stream.writeln('')
2267
2276
2268 if not self._runner.options.noskips:
2277 if not self._runner.options.noskips:
2269 for test, msg in sorted(self._result.skipped,
2278 for test, msg in sorted(self._result.skipped,
2270 key=lambda s: s[0].name):
2279 key=lambda s: s[0].name):
2271 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2280 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2272 msg = highlightmsg(formatted, self._result.color)
2281 msg = highlightmsg(formatted, self._result.color)
2273 self.stream.write(msg)
2282 self.stream.write(msg)
2274 for test, msg in sorted(self._result.failures,
2283 for test, msg in sorted(self._result.failures,
2275 key=lambda f: f[0].name):
2284 key=lambda f: f[0].name):
2276 formatted = 'Failed %s: %s\n' % (test.name, msg)
2285 formatted = 'Failed %s: %s\n' % (test.name, msg)
2277 self.stream.write(highlightmsg(formatted, self._result.color))
2286 self.stream.write(highlightmsg(formatted, self._result.color))
2278 for test, msg in sorted(self._result.errors,
2287 for test, msg in sorted(self._result.errors,
2279 key=lambda e: e[0].name):
2288 key=lambda e: e[0].name):
2280 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2289 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2281
2290
2282 if self._runner.options.xunit:
2291 if self._runner.options.xunit:
2283 with open(self._runner.options.xunit, "wb") as xuf:
2292 with open(self._runner.options.xunit, "wb") as xuf:
2284 self._writexunit(self._result, xuf)
2293 self._writexunit(self._result, xuf)
2285
2294
2286 if self._runner.options.json:
2295 if self._runner.options.json:
2287 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2296 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2288 with open(jsonpath, 'w') as fp:
2297 with open(jsonpath, 'w') as fp:
2289 self._writejson(self._result, fp)
2298 self._writejson(self._result, fp)
2290
2299
2291 self._runner._checkhglib('Tested')
2300 self._runner._checkhglib('Tested')
2292
2301
2293 savetimes(self._runner._outputdir, self._result)
2302 savetimes(self._runner._outputdir, self._result)
2294
2303
2295 if failed and self._runner.options.known_good_rev:
2304 if failed and self._runner.options.known_good_rev:
2296 self._bisecttests(t for t, m in self._result.failures)
2305 self._bisecttests(t for t, m in self._result.failures)
2297 self.stream.writeln(
2306 self.stream.writeln(
2298 '# Ran %d tests, %d skipped, %d failed.'
2307 '# Ran %d tests, %d skipped, %d failed.'
2299 % (self._result.testsRun, skipped + ignored, failed))
2308 % (self._result.testsRun, skipped + ignored, failed))
2300 if failed:
2309 if failed:
2301 self.stream.writeln('python hash seed: %s' %
2310 self.stream.writeln('python hash seed: %s' %
2302 os.environ['PYTHONHASHSEED'])
2311 os.environ['PYTHONHASHSEED'])
2303 if self._runner.options.time:
2312 if self._runner.options.time:
2304 self.printtimes(self._result.times)
2313 self.printtimes(self._result.times)
2305
2314
2306 if self._runner.options.exceptions:
2315 if self._runner.options.exceptions:
2307 exceptions = aggregateexceptions(
2316 exceptions = aggregateexceptions(
2308 os.path.join(self._runner._outputdir, b'exceptions'))
2317 os.path.join(self._runner._outputdir, b'exceptions'))
2309
2318
2310 self.stream.writeln('Exceptions Report:')
2319 self.stream.writeln('Exceptions Report:')
2311 self.stream.writeln('%d total from %d frames' %
2320 self.stream.writeln('%d total from %d frames' %
2312 (exceptions['total'],
2321 (exceptions['total'],
2313 len(exceptions['exceptioncounts'])))
2322 len(exceptions['exceptioncounts'])))
2314 combined = exceptions['combined']
2323 combined = exceptions['combined']
2315 for key in sorted(combined, key=combined.get, reverse=True):
2324 for key in sorted(combined, key=combined.get, reverse=True):
2316 frame, line, exc = key
2325 frame, line, exc = key
2317 totalcount, testcount, leastcount, leasttest = combined[key]
2326 totalcount, testcount, leastcount, leasttest = combined[key]
2318
2327
2319 self.stream.writeln('%d (%d tests)\t%s: %s (%s - %d total)'
2328 self.stream.writeln('%d (%d tests)\t%s: %s (%s - %d total)'
2320 % (totalcount,
2329 % (totalcount,
2321 testcount,
2330 testcount,
2322 frame, exc,
2331 frame, exc,
2323 leasttest, leastcount))
2332 leasttest, leastcount))
2324
2333
2325 self.stream.flush()
2334 self.stream.flush()
2326
2335
2327 return self._result
2336 return self._result
2328
2337
2329 def _bisecttests(self, tests):
2338 def _bisecttests(self, tests):
2330 bisectcmd = ['hg', 'bisect']
2339 bisectcmd = ['hg', 'bisect']
2331 bisectrepo = self._runner.options.bisect_repo
2340 bisectrepo = self._runner.options.bisect_repo
2332 if bisectrepo:
2341 if bisectrepo:
2333 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2342 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2334 def pread(args):
2343 def pread(args):
2335 env = os.environ.copy()
2344 env = os.environ.copy()
2336 env['HGPLAIN'] = '1'
2345 env['HGPLAIN'] = '1'
2337 p = subprocess.Popen(args, stderr=subprocess.STDOUT,
2346 p = subprocess.Popen(args, stderr=subprocess.STDOUT,
2338 stdout=subprocess.PIPE, env=env)
2347 stdout=subprocess.PIPE, env=env)
2339 data = p.stdout.read()
2348 data = p.stdout.read()
2340 p.wait()
2349 p.wait()
2341 return data
2350 return data
2342 for test in tests:
2351 for test in tests:
2343 pread(bisectcmd + ['--reset']),
2352 pread(bisectcmd + ['--reset']),
2344 pread(bisectcmd + ['--bad', '.'])
2353 pread(bisectcmd + ['--bad', '.'])
2345 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2354 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2346 # TODO: we probably need to forward more options
2355 # TODO: we probably need to forward more options
2347 # that alter hg's behavior inside the tests.
2356 # that alter hg's behavior inside the tests.
2348 opts = ''
2357 opts = ''
2349 withhg = self._runner.options.with_hg
2358 withhg = self._runner.options.with_hg
2350 if withhg:
2359 if withhg:
2351 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2360 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2352 rtc = '%s %s %s %s' % (sys.executable, sys.argv[0], opts,
2361 rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts,
2353 test)
2362 test)
2354 data = pread(bisectcmd + ['--command', rtc])
2363 data = pread(bisectcmd + ['--command', rtc])
2355 m = re.search(
2364 m = re.search(
2356 (br'\nThe first (?P<goodbad>bad|good) revision '
2365 (br'\nThe first (?P<goodbad>bad|good) revision '
2357 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2366 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2358 br'summary: +(?P<summary>[^\n]+)\n'),
2367 br'summary: +(?P<summary>[^\n]+)\n'),
2359 data, (re.MULTILINE | re.DOTALL))
2368 data, (re.MULTILINE | re.DOTALL))
2360 if m is None:
2369 if m is None:
2361 self.stream.writeln(
2370 self.stream.writeln(
2362 'Failed to identify failure point for %s' % test)
2371 'Failed to identify failure point for %s' % test)
2363 continue
2372 continue
2364 dat = m.groupdict()
2373 dat = m.groupdict()
2365 verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
2374 verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
2366 self.stream.writeln(
2375 self.stream.writeln(
2367 '%s %s by %s (%s)' % (
2376 '%s %s by %s (%s)' % (
2368 test, verb, dat['node'].decode('ascii'),
2377 test, verb, dat['node'].decode('ascii'),
2369 dat['summary'].decode('utf8', 'ignore')))
2378 dat['summary'].decode('utf8', 'ignore')))
2370
2379
2371 def printtimes(self, times):
2380 def printtimes(self, times):
2372 # iolock held by run
2381 # iolock held by run
2373 self.stream.writeln('# Producing time report')
2382 self.stream.writeln('# Producing time report')
2374 times.sort(key=lambda t: (t[3]))
2383 times.sort(key=lambda t: (t[3]))
2375 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2384 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2376 self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' %
2385 self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' %
2377 ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
2386 ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
2378 for tdata in times:
2387 for tdata in times:
2379 test = tdata[0]
2388 test = tdata[0]
2380 cuser, csys, real, start, end = tdata[1:6]
2389 cuser, csys, real, start, end = tdata[1:6]
2381 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2390 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2382
2391
2383 @staticmethod
2392 @staticmethod
2384 def _writexunit(result, outf):
2393 def _writexunit(result, outf):
2385 # See http://llg.cubic.org/docs/junit/ for a reference.
2394 # See http://llg.cubic.org/docs/junit/ for a reference.
2386 timesd = dict((t[0], t[3]) for t in result.times)
2395 timesd = dict((t[0], t[3]) for t in result.times)
2387 doc = minidom.Document()
2396 doc = minidom.Document()
2388 s = doc.createElement('testsuite')
2397 s = doc.createElement('testsuite')
2389 s.setAttribute('errors', "0") # TODO
2398 s.setAttribute('errors', "0") # TODO
2390 s.setAttribute('failures', str(len(result.failures)))
2399 s.setAttribute('failures', str(len(result.failures)))
2391 s.setAttribute('name', 'run-tests')
2400 s.setAttribute('name', 'run-tests')
2392 s.setAttribute('skipped', str(len(result.skipped) +
2401 s.setAttribute('skipped', str(len(result.skipped) +
2393 len(result.ignored)))
2402 len(result.ignored)))
2394 s.setAttribute('tests', str(result.testsRun))
2403 s.setAttribute('tests', str(result.testsRun))
2395 doc.appendChild(s)
2404 doc.appendChild(s)
2396 for tc in result.successes:
2405 for tc in result.successes:
2397 t = doc.createElement('testcase')
2406 t = doc.createElement('testcase')
2398 t.setAttribute('name', tc.name)
2407 t.setAttribute('name', tc.name)
2399 tctime = timesd.get(tc.name)
2408 tctime = timesd.get(tc.name)
2400 if tctime is not None:
2409 if tctime is not None:
2401 t.setAttribute('time', '%.3f' % tctime)
2410 t.setAttribute('time', '%.3f' % tctime)
2402 s.appendChild(t)
2411 s.appendChild(t)
2403 for tc, err in sorted(result.faildata.items()):
2412 for tc, err in sorted(result.faildata.items()):
2404 t = doc.createElement('testcase')
2413 t = doc.createElement('testcase')
2405 t.setAttribute('name', tc)
2414 t.setAttribute('name', tc)
2406 tctime = timesd.get(tc)
2415 tctime = timesd.get(tc)
2407 if tctime is not None:
2416 if tctime is not None:
2408 t.setAttribute('time', '%.3f' % tctime)
2417 t.setAttribute('time', '%.3f' % tctime)
2409 # createCDATASection expects a unicode or it will
2418 # createCDATASection expects a unicode or it will
2410 # convert using default conversion rules, which will
2419 # convert using default conversion rules, which will
2411 # fail if string isn't ASCII.
2420 # fail if string isn't ASCII.
2412 err = cdatasafe(err).decode('utf-8', 'replace')
2421 err = cdatasafe(err).decode('utf-8', 'replace')
2413 cd = doc.createCDATASection(err)
2422 cd = doc.createCDATASection(err)
2414 # Use 'failure' here instead of 'error' to match errors = 0,
2423 # Use 'failure' here instead of 'error' to match errors = 0,
2415 # failures = len(result.failures) in the testsuite element.
2424 # failures = len(result.failures) in the testsuite element.
2416 failelem = doc.createElement('failure')
2425 failelem = doc.createElement('failure')
2417 failelem.setAttribute('message', 'output changed')
2426 failelem.setAttribute('message', 'output changed')
2418 failelem.setAttribute('type', 'output-mismatch')
2427 failelem.setAttribute('type', 'output-mismatch')
2419 failelem.appendChild(cd)
2428 failelem.appendChild(cd)
2420 t.appendChild(failelem)
2429 t.appendChild(failelem)
2421 s.appendChild(t)
2430 s.appendChild(t)
2422 for tc, message in result.skipped:
2431 for tc, message in result.skipped:
2423 # According to the schema, 'skipped' has no attributes. So store
2432 # According to the schema, 'skipped' has no attributes. So store
2424 # the skip message as a text node instead.
2433 # the skip message as a text node instead.
2425 t = doc.createElement('testcase')
2434 t = doc.createElement('testcase')
2426 t.setAttribute('name', tc.name)
2435 t.setAttribute('name', tc.name)
2427 binmessage = message.encode('utf-8')
2436 binmessage = message.encode('utf-8')
2428 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2437 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2429 cd = doc.createCDATASection(message)
2438 cd = doc.createCDATASection(message)
2430 skipelem = doc.createElement('skipped')
2439 skipelem = doc.createElement('skipped')
2431 skipelem.appendChild(cd)
2440 skipelem.appendChild(cd)
2432 t.appendChild(skipelem)
2441 t.appendChild(skipelem)
2433 s.appendChild(t)
2442 s.appendChild(t)
2434 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
2443 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
2435
2444
2436 @staticmethod
2445 @staticmethod
2437 def _writejson(result, outf):
2446 def _writejson(result, outf):
2438 timesd = {}
2447 timesd = {}
2439 for tdata in result.times:
2448 for tdata in result.times:
2440 test = tdata[0]
2449 test = tdata[0]
2441 timesd[test] = tdata[1:]
2450 timesd[test] = tdata[1:]
2442
2451
2443 outcome = {}
2452 outcome = {}
2444 groups = [('success', ((tc, None)
2453 groups = [('success', ((tc, None)
2445 for tc in result.successes)),
2454 for tc in result.successes)),
2446 ('failure', result.failures),
2455 ('failure', result.failures),
2447 ('skip', result.skipped)]
2456 ('skip', result.skipped)]
2448 for res, testcases in groups:
2457 for res, testcases in groups:
2449 for tc, __ in testcases:
2458 for tc, __ in testcases:
2450 if tc.name in timesd:
2459 if tc.name in timesd:
2451 diff = result.faildata.get(tc.name, b'')
2460 diff = result.faildata.get(tc.name, b'')
2452 try:
2461 try:
2453 diff = diff.decode('unicode_escape')
2462 diff = diff.decode('unicode_escape')
2454 except UnicodeDecodeError as e:
2463 except UnicodeDecodeError as e:
2455 diff = '%r decoding diff, sorry' % e
2464 diff = '%r decoding diff, sorry' % e
2456 tres = {'result': res,
2465 tres = {'result': res,
2457 'time': ('%0.3f' % timesd[tc.name][2]),
2466 'time': ('%0.3f' % timesd[tc.name][2]),
2458 'cuser': ('%0.3f' % timesd[tc.name][0]),
2467 'cuser': ('%0.3f' % timesd[tc.name][0]),
2459 'csys': ('%0.3f' % timesd[tc.name][1]),
2468 'csys': ('%0.3f' % timesd[tc.name][1]),
2460 'start': ('%0.3f' % timesd[tc.name][3]),
2469 'start': ('%0.3f' % timesd[tc.name][3]),
2461 'end': ('%0.3f' % timesd[tc.name][4]),
2470 'end': ('%0.3f' % timesd[tc.name][4]),
2462 'diff': diff,
2471 'diff': diff,
2463 }
2472 }
2464 else:
2473 else:
2465 # blacklisted test
2474 # blacklisted test
2466 tres = {'result': res}
2475 tres = {'result': res}
2467
2476
2468 outcome[tc.name] = tres
2477 outcome[tc.name] = tres
2469 jsonout = json.dumps(outcome, sort_keys=True, indent=4,
2478 jsonout = json.dumps(outcome, sort_keys=True, indent=4,
2470 separators=(',', ': '))
2479 separators=(',', ': '))
2471 outf.writelines(("testreport =", jsonout))
2480 outf.writelines(("testreport =", jsonout))
2472
2481
2473 def sorttests(testdescs, previoustimes, shuffle=False):
2482 def sorttests(testdescs, previoustimes, shuffle=False):
2474 """Do an in-place sort of tests."""
2483 """Do an in-place sort of tests."""
2475 if shuffle:
2484 if shuffle:
2476 random.shuffle(testdescs)
2485 random.shuffle(testdescs)
2477 return
2486 return
2478
2487
2479 if previoustimes:
2488 if previoustimes:
2480 def sortkey(f):
2489 def sortkey(f):
2481 f = f['path']
2490 f = f['path']
2482 if f in previoustimes:
2491 if f in previoustimes:
2483 # Use most recent time as estimate
2492 # Use most recent time as estimate
2484 return -previoustimes[f][-1]
2493 return -previoustimes[f][-1]
2485 else:
2494 else:
2486 # Default to a rather arbitrary value of 1 second for new tests
2495 # Default to a rather arbitrary value of 1 second for new tests
2487 return -1.0
2496 return -1.0
2488 else:
2497 else:
2489 # keywords for slow tests
2498 # keywords for slow tests
2490 slow = {b'svn': 10,
2499 slow = {b'svn': 10,
2491 b'cvs': 10,
2500 b'cvs': 10,
2492 b'hghave': 10,
2501 b'hghave': 10,
2493 b'largefiles-update': 10,
2502 b'largefiles-update': 10,
2494 b'run-tests': 10,
2503 b'run-tests': 10,
2495 b'corruption': 10,
2504 b'corruption': 10,
2496 b'race': 10,
2505 b'race': 10,
2497 b'i18n': 10,
2506 b'i18n': 10,
2498 b'check': 100,
2507 b'check': 100,
2499 b'gendoc': 100,
2508 b'gendoc': 100,
2500 b'contrib-perf': 200,
2509 b'contrib-perf': 200,
2501 }
2510 }
2502 perf = {}
2511 perf = {}
2503
2512
2504 def sortkey(f):
2513 def sortkey(f):
2505 # run largest tests first, as they tend to take the longest
2514 # run largest tests first, as they tend to take the longest
2506 f = f['path']
2515 f = f['path']
2507 try:
2516 try:
2508 return perf[f]
2517 return perf[f]
2509 except KeyError:
2518 except KeyError:
2510 try:
2519 try:
2511 val = -os.stat(f).st_size
2520 val = -os.stat(f).st_size
2512 except OSError as e:
2521 except OSError as e:
2513 if e.errno != errno.ENOENT:
2522 if e.errno != errno.ENOENT:
2514 raise
2523 raise
2515 perf[f] = -1e9 # file does not exist, tell early
2524 perf[f] = -1e9 # file does not exist, tell early
2516 return -1e9
2525 return -1e9
2517 for kw, mul in slow.items():
2526 for kw, mul in slow.items():
2518 if kw in f:
2527 if kw in f:
2519 val *= mul
2528 val *= mul
2520 if f.endswith(b'.py'):
2529 if f.endswith(b'.py'):
2521 val /= 10.0
2530 val /= 10.0
2522 perf[f] = val / 1000.0
2531 perf[f] = val / 1000.0
2523 return perf[f]
2532 return perf[f]
2524
2533
2525 testdescs.sort(key=sortkey)
2534 testdescs.sort(key=sortkey)
2526
2535
2527 class TestRunner(object):
2536 class TestRunner(object):
2528 """Holds context for executing tests.
2537 """Holds context for executing tests.
2529
2538
2530 Tests rely on a lot of state. This object holds it for them.
2539 Tests rely on a lot of state. This object holds it for them.
2531 """
2540 """
2532
2541
2533 # Programs required to run tests.
2542 # Programs required to run tests.
2534 REQUIREDTOOLS = [
2543 REQUIREDTOOLS = [
2535 b'diff',
2544 b'diff',
2536 b'grep',
2545 b'grep',
2537 b'unzip',
2546 b'unzip',
2538 b'gunzip',
2547 b'gunzip',
2539 b'bunzip2',
2548 b'bunzip2',
2540 b'sed',
2549 b'sed',
2541 ]
2550 ]
2542
2551
2543 # Maps file extensions to test class.
2552 # Maps file extensions to test class.
2544 TESTTYPES = [
2553 TESTTYPES = [
2545 (b'.py', PythonTest),
2554 (b'.py', PythonTest),
2546 (b'.t', TTest),
2555 (b'.t', TTest),
2547 ]
2556 ]
2548
2557
2549 def __init__(self):
2558 def __init__(self):
2550 self.options = None
2559 self.options = None
2551 self._hgroot = None
2560 self._hgroot = None
2552 self._testdir = None
2561 self._testdir = None
2553 self._outputdir = None
2562 self._outputdir = None
2554 self._hgtmp = None
2563 self._hgtmp = None
2555 self._installdir = None
2564 self._installdir = None
2556 self._bindir = None
2565 self._bindir = None
2557 self._tmpbinddir = None
2566 self._tmpbinddir = None
2558 self._pythondir = None
2567 self._pythondir = None
2559 self._coveragefile = None
2568 self._coveragefile = None
2560 self._createdfiles = []
2569 self._createdfiles = []
2561 self._hgcommand = None
2570 self._hgcommand = None
2562 self._hgpath = None
2571 self._hgpath = None
2563 self._portoffset = 0
2572 self._portoffset = 0
2564 self._ports = {}
2573 self._ports = {}
2565
2574
2566 def run(self, args, parser=None):
2575 def run(self, args, parser=None):
2567 """Run the test suite."""
2576 """Run the test suite."""
2568 oldmask = os.umask(0o22)
2577 oldmask = os.umask(0o22)
2569 try:
2578 try:
2570 parser = parser or getparser()
2579 parser = parser or getparser()
2571 options = parseargs(args, parser)
2580 options = parseargs(args, parser)
2572 tests = [_bytespath(a) for a in options.tests]
2581 tests = [_bytespath(a) for a in options.tests]
2573 if options.test_list is not None:
2582 if options.test_list is not None:
2574 for listfile in options.test_list:
2583 for listfile in options.test_list:
2575 with open(listfile, 'rb') as f:
2584 with open(listfile, 'rb') as f:
2576 tests.extend(t for t in f.read().splitlines() if t)
2585 tests.extend(t for t in f.read().splitlines() if t)
2577 self.options = options
2586 self.options = options
2578
2587
2579 self._checktools()
2588 self._checktools()
2580 testdescs = self.findtests(tests)
2589 testdescs = self.findtests(tests)
2581 if options.profile_runner:
2590 if options.profile_runner:
2582 import statprof
2591 import statprof
2583 statprof.start()
2592 statprof.start()
2584 result = self._run(testdescs)
2593 result = self._run(testdescs)
2585 if options.profile_runner:
2594 if options.profile_runner:
2586 statprof.stop()
2595 statprof.stop()
2587 statprof.display()
2596 statprof.display()
2588 return result
2597 return result
2589
2598
2590 finally:
2599 finally:
2591 os.umask(oldmask)
2600 os.umask(oldmask)
2592
2601
2593 def _run(self, testdescs):
2602 def _run(self, testdescs):
2594 testdir = getcwdb()
2603 testdir = getcwdb()
2595 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2604 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2596 # assume all tests in same folder for now
2605 # assume all tests in same folder for now
2597 if testdescs:
2606 if testdescs:
2598 pathname = os.path.dirname(testdescs[0]['path'])
2607 pathname = os.path.dirname(testdescs[0]['path'])
2599 if pathname:
2608 if pathname:
2600 testdir = os.path.join(testdir, pathname)
2609 testdir = os.path.join(testdir, pathname)
2601 self._testdir = osenvironb[b'TESTDIR'] = testdir
2610 self._testdir = osenvironb[b'TESTDIR'] = testdir
2602 if self.options.outputdir:
2611 if self.options.outputdir:
2603 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2612 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2604 else:
2613 else:
2605 self._outputdir = getcwdb()
2614 self._outputdir = getcwdb()
2606 if testdescs and pathname:
2615 if testdescs and pathname:
2607 self._outputdir = os.path.join(self._outputdir, pathname)
2616 self._outputdir = os.path.join(self._outputdir, pathname)
2608 previoustimes = {}
2617 previoustimes = {}
2609 if self.options.order_by_runtime:
2618 if self.options.order_by_runtime:
2610 previoustimes = dict(loadtimes(self._outputdir))
2619 previoustimes = dict(loadtimes(self._outputdir))
2611 sorttests(testdescs, previoustimes, shuffle=self.options.random)
2620 sorttests(testdescs, previoustimes, shuffle=self.options.random)
2612
2621
2613 if 'PYTHONHASHSEED' not in os.environ:
2622 if 'PYTHONHASHSEED' not in os.environ:
2614 # use a random python hash seed all the time
2623 # use a random python hash seed all the time
2615 # we do the randomness ourself to know what seed is used
2624 # we do the randomness ourself to know what seed is used
2616 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2625 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2617
2626
2618 if self.options.tmpdir:
2627 if self.options.tmpdir:
2619 self.options.keep_tmpdir = True
2628 self.options.keep_tmpdir = True
2620 tmpdir = _bytespath(self.options.tmpdir)
2629 tmpdir = _bytespath(self.options.tmpdir)
2621 if os.path.exists(tmpdir):
2630 if os.path.exists(tmpdir):
2622 # Meaning of tmpdir has changed since 1.3: we used to create
2631 # Meaning of tmpdir has changed since 1.3: we used to create
2623 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2632 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2624 # tmpdir already exists.
2633 # tmpdir already exists.
2625 print("error: temp dir %r already exists" % tmpdir)
2634 print("error: temp dir %r already exists" % tmpdir)
2626 return 1
2635 return 1
2627
2636
2628 os.makedirs(tmpdir)
2637 os.makedirs(tmpdir)
2629 else:
2638 else:
2630 d = None
2639 d = None
2631 if os.name == 'nt':
2640 if os.name == 'nt':
2632 # without this, we get the default temp dir location, but
2641 # without this, we get the default temp dir location, but
2633 # in all lowercase, which causes troubles with paths (issue3490)
2642 # in all lowercase, which causes troubles with paths (issue3490)
2634 d = osenvironb.get(b'TMP', None)
2643 d = osenvironb.get(b'TMP', None)
2635 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2644 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2636
2645
2637 self._hgtmp = osenvironb[b'HGTMP'] = (
2646 self._hgtmp = osenvironb[b'HGTMP'] = (
2638 os.path.realpath(tmpdir))
2647 os.path.realpath(tmpdir))
2639
2648
2640 if self.options.with_hg:
2649 if self.options.with_hg:
2641 self._installdir = None
2650 self._installdir = None
2642 whg = self.options.with_hg
2651 whg = self.options.with_hg
2643 self._bindir = os.path.dirname(os.path.realpath(whg))
2652 self._bindir = os.path.dirname(os.path.realpath(whg))
2644 assert isinstance(self._bindir, bytes)
2653 assert isinstance(self._bindir, bytes)
2645 self._hgcommand = os.path.basename(whg)
2654 self._hgcommand = os.path.basename(whg)
2646 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2655 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2647 os.makedirs(self._tmpbindir)
2656 os.makedirs(self._tmpbindir)
2648
2657
2649 normbin = os.path.normpath(os.path.abspath(whg))
2658 normbin = os.path.normpath(os.path.abspath(whg))
2650 normbin = normbin.replace(os.sep.encode('ascii'), b'/')
2659 normbin = normbin.replace(os.sep.encode('ascii'), b'/')
2651
2660
2652 # Other Python scripts in the test harness need to
2661 # Other Python scripts in the test harness need to
2653 # `import mercurial`. If `hg` is a Python script, we assume
2662 # `import mercurial`. If `hg` is a Python script, we assume
2654 # the Mercurial modules are relative to its path and tell the tests
2663 # the Mercurial modules are relative to its path and tell the tests
2655 # to load Python modules from its directory.
2664 # to load Python modules from its directory.
2656 with open(whg, 'rb') as fh:
2665 with open(whg, 'rb') as fh:
2657 initial = fh.read(1024)
2666 initial = fh.read(1024)
2658
2667
2659 if re.match(b'#!.*python', initial):
2668 if re.match(b'#!.*python', initial):
2660 self._pythondir = self._bindir
2669 self._pythondir = self._bindir
2661 # If it looks like our in-repo Rust binary, use the source root.
2670 # If it looks like our in-repo Rust binary, use the source root.
2662 # This is a bit hacky. But rhg is still not supported outside the
2671 # This is a bit hacky. But rhg is still not supported outside the
2663 # source directory. So until it is, do the simple thing.
2672 # source directory. So until it is, do the simple thing.
2664 elif re.search(b'/rust/target/[^/]+/hg', normbin):
2673 elif re.search(b'/rust/target/[^/]+/hg', normbin):
2665 self._pythondir = os.path.dirname(self._testdir)
2674 self._pythondir = os.path.dirname(self._testdir)
2666 # Fall back to the legacy behavior.
2675 # Fall back to the legacy behavior.
2667 else:
2676 else:
2668 self._pythondir = self._bindir
2677 self._pythondir = self._bindir
2669
2678
2670 else:
2679 else:
2671 self._installdir = os.path.join(self._hgtmp, b"install")
2680 self._installdir = os.path.join(self._hgtmp, b"install")
2672 self._bindir = os.path.join(self._installdir, b"bin")
2681 self._bindir = os.path.join(self._installdir, b"bin")
2673 self._hgcommand = b'hg'
2682 self._hgcommand = b'hg'
2674 self._tmpbindir = self._bindir
2683 self._tmpbindir = self._bindir
2675 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2684 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2676
2685
2677 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
2686 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
2678 # a python script and feed it to python.exe. Legacy stdio is force
2687 # a python script and feed it to python.exe. Legacy stdio is force
2679 # enabled by hg.exe, and this is a more realistic way to launch hg
2688 # enabled by hg.exe, and this is a more realistic way to launch hg
2680 # anyway.
2689 # anyway.
2681 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
2690 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
2682 self._hgcommand += b'.exe'
2691 self._hgcommand += b'.exe'
2683
2692
2684 # set CHGHG, then replace "hg" command by "chg"
2693 # set CHGHG, then replace "hg" command by "chg"
2685 chgbindir = self._bindir
2694 chgbindir = self._bindir
2686 if self.options.chg or self.options.with_chg:
2695 if self.options.chg or self.options.with_chg:
2687 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2696 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2688 else:
2697 else:
2689 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2698 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2690 if self.options.chg:
2699 if self.options.chg:
2691 self._hgcommand = b'chg'
2700 self._hgcommand = b'chg'
2692 elif self.options.with_chg:
2701 elif self.options.with_chg:
2693 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2702 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2694 self._hgcommand = os.path.basename(self.options.with_chg)
2703 self._hgcommand = os.path.basename(self.options.with_chg)
2695
2704
2696 osenvironb[b"BINDIR"] = self._bindir
2705 osenvironb[b"BINDIR"] = self._bindir
2697 osenvironb[b"PYTHON"] = PYTHON
2706 osenvironb[b"PYTHON"] = PYTHON
2698
2707
2699 fileb = _bytespath(__file__)
2708 fileb = _bytespath(__file__)
2700 runtestdir = os.path.abspath(os.path.dirname(fileb))
2709 runtestdir = os.path.abspath(os.path.dirname(fileb))
2701 osenvironb[b'RUNTESTDIR'] = runtestdir
2710 osenvironb[b'RUNTESTDIR'] = runtestdir
2702 if PYTHON3:
2711 if PYTHON3:
2703 sepb = _bytespath(os.pathsep)
2712 sepb = _bytespath(os.pathsep)
2704 else:
2713 else:
2705 sepb = os.pathsep
2714 sepb = os.pathsep
2706 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2715 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2707 if os.path.islink(__file__):
2716 if os.path.islink(__file__):
2708 # test helper will likely be at the end of the symlink
2717 # test helper will likely be at the end of the symlink
2709 realfile = os.path.realpath(fileb)
2718 realfile = os.path.realpath(fileb)
2710 realdir = os.path.abspath(os.path.dirname(realfile))
2719 realdir = os.path.abspath(os.path.dirname(realfile))
2711 path.insert(2, realdir)
2720 path.insert(2, realdir)
2712 if chgbindir != self._bindir:
2721 if chgbindir != self._bindir:
2713 path.insert(1, chgbindir)
2722 path.insert(1, chgbindir)
2714 if self._testdir != runtestdir:
2723 if self._testdir != runtestdir:
2715 path = [self._testdir] + path
2724 path = [self._testdir] + path
2716 if self._tmpbindir != self._bindir:
2725 if self._tmpbindir != self._bindir:
2717 path = [self._tmpbindir] + path
2726 path = [self._tmpbindir] + path
2718 osenvironb[b"PATH"] = sepb.join(path)
2727 osenvironb[b"PATH"] = sepb.join(path)
2719
2728
2720 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2729 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2721 # can run .../tests/run-tests.py test-foo where test-foo
2730 # can run .../tests/run-tests.py test-foo where test-foo
2722 # adds an extension to HGRC. Also include run-test.py directory to
2731 # adds an extension to HGRC. Also include run-test.py directory to
2723 # import modules like heredoctest.
2732 # import modules like heredoctest.
2724 pypath = [self._pythondir, self._testdir, runtestdir]
2733 pypath = [self._pythondir, self._testdir, runtestdir]
2725 # We have to augment PYTHONPATH, rather than simply replacing
2734 # We have to augment PYTHONPATH, rather than simply replacing
2726 # it, in case external libraries are only available via current
2735 # it, in case external libraries are only available via current
2727 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2736 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2728 # are in /opt/subversion.)
2737 # are in /opt/subversion.)
2729 oldpypath = osenvironb.get(IMPL_PATH)
2738 oldpypath = osenvironb.get(IMPL_PATH)
2730 if oldpypath:
2739 if oldpypath:
2731 pypath.append(oldpypath)
2740 pypath.append(oldpypath)
2732 osenvironb[IMPL_PATH] = sepb.join(pypath)
2741 osenvironb[IMPL_PATH] = sepb.join(pypath)
2733
2742
2734 if self.options.pure:
2743 if self.options.pure:
2735 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2744 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2736 os.environ["HGMODULEPOLICY"] = "py"
2745 os.environ["HGMODULEPOLICY"] = "py"
2737
2746
2738 if self.options.allow_slow_tests:
2747 if self.options.allow_slow_tests:
2739 os.environ["HGTEST_SLOW"] = "slow"
2748 os.environ["HGTEST_SLOW"] = "slow"
2740 elif 'HGTEST_SLOW' in os.environ:
2749 elif 'HGTEST_SLOW' in os.environ:
2741 del os.environ['HGTEST_SLOW']
2750 del os.environ['HGTEST_SLOW']
2742
2751
2743 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2752 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2744
2753
2745 if self.options.exceptions:
2754 if self.options.exceptions:
2746 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
2755 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
2747 try:
2756 try:
2748 os.makedirs(exceptionsdir)
2757 os.makedirs(exceptionsdir)
2749 except OSError as e:
2758 except OSError as e:
2750 if e.errno != errno.EEXIST:
2759 if e.errno != errno.EEXIST:
2751 raise
2760 raise
2752
2761
2753 # Remove all existing exception reports.
2762 # Remove all existing exception reports.
2754 for f in os.listdir(exceptionsdir):
2763 for f in os.listdir(exceptionsdir):
2755 os.unlink(os.path.join(exceptionsdir, f))
2764 os.unlink(os.path.join(exceptionsdir, f))
2756
2765
2757 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
2766 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
2758 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
2767 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
2759 self.options.extra_config_opt.append(
2768 self.options.extra_config_opt.append(
2760 'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
2769 'extensions.logexceptions=%s' % logexceptions.decode('utf-8'))
2761
2770
2762 vlog("# Using TESTDIR", self._testdir)
2771 vlog("# Using TESTDIR", self._testdir)
2763 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2772 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2764 vlog("# Using HGTMP", self._hgtmp)
2773 vlog("# Using HGTMP", self._hgtmp)
2765 vlog("# Using PATH", os.environ["PATH"])
2774 vlog("# Using PATH", os.environ["PATH"])
2766 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2775 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2767 vlog("# Writing to directory", self._outputdir)
2776 vlog("# Writing to directory", self._outputdir)
2768
2777
2769 try:
2778 try:
2770 return self._runtests(testdescs) or 0
2779 return self._runtests(testdescs) or 0
2771 finally:
2780 finally:
2772 time.sleep(.1)
2781 time.sleep(.1)
2773 self._cleanup()
2782 self._cleanup()
2774
2783
2775 def findtests(self, args):
2784 def findtests(self, args):
2776 """Finds possible test files from arguments.
2785 """Finds possible test files from arguments.
2777
2786
2778 If you wish to inject custom tests into the test harness, this would
2787 If you wish to inject custom tests into the test harness, this would
2779 be a good function to monkeypatch or override in a derived class.
2788 be a good function to monkeypatch or override in a derived class.
2780 """
2789 """
2781 if not args:
2790 if not args:
2782 if self.options.changed:
2791 if self.options.changed:
2783 proc = Popen4(b'hg st --rev "%s" -man0 .' %
2792 proc = Popen4(b'hg st --rev "%s" -man0 .' %
2784 _bytespath(self.options.changed), None, 0)
2793 _bytespath(self.options.changed), None, 0)
2785 stdout, stderr = proc.communicate()
2794 stdout, stderr = proc.communicate()
2786 args = stdout.strip(b'\0').split(b'\0')
2795 args = stdout.strip(b'\0').split(b'\0')
2787 else:
2796 else:
2788 args = os.listdir(b'.')
2797 args = os.listdir(b'.')
2789
2798
2790 expanded_args = []
2799 expanded_args = []
2791 for arg in args:
2800 for arg in args:
2792 if os.path.isdir(arg):
2801 if os.path.isdir(arg):
2793 if not arg.endswith(b'/'):
2802 if not arg.endswith(b'/'):
2794 arg += b'/'
2803 arg += b'/'
2795 expanded_args.extend([arg + a for a in os.listdir(arg)])
2804 expanded_args.extend([arg + a for a in os.listdir(arg)])
2796 else:
2805 else:
2797 expanded_args.append(arg)
2806 expanded_args.append(arg)
2798 args = expanded_args
2807 args = expanded_args
2799
2808
2800 testcasepattern = re.compile(
2809 testcasepattern = re.compile(
2801 br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))')
2810 br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))')
2802 tests = []
2811 tests = []
2803 for t in args:
2812 for t in args:
2804 case = []
2813 case = []
2805
2814
2806 if not (os.path.basename(t).startswith(b'test-')
2815 if not (os.path.basename(t).startswith(b'test-')
2807 and (t.endswith(b'.py') or t.endswith(b'.t'))):
2816 and (t.endswith(b'.py') or t.endswith(b'.t'))):
2808
2817
2809 m = testcasepattern.match(os.path.basename(t))
2818 m = testcasepattern.match(os.path.basename(t))
2810 if m is not None:
2819 if m is not None:
2811 t_basename, casestr = m.groups()
2820 t_basename, casestr = m.groups()
2812 t = os.path.join(os.path.dirname(t), t_basename)
2821 t = os.path.join(os.path.dirname(t), t_basename)
2813 if casestr:
2822 if casestr:
2814 case = casestr.split(b'#')
2823 case = casestr.split(b'#')
2815 else:
2824 else:
2816 continue
2825 continue
2817
2826
2818 if t.endswith(b'.t'):
2827 if t.endswith(b'.t'):
2819 # .t file may contain multiple test cases
2828 # .t file may contain multiple test cases
2820 casedimensions = parsettestcases(t)
2829 casedimensions = parsettestcases(t)
2821 if casedimensions:
2830 if casedimensions:
2822 cases = []
2831 cases = []
2823 def addcases(case, casedimensions):
2832 def addcases(case, casedimensions):
2824 if not casedimensions:
2833 if not casedimensions:
2825 cases.append(case)
2834 cases.append(case)
2826 else:
2835 else:
2827 for c in casedimensions[0]:
2836 for c in casedimensions[0]:
2828 addcases(case + [c], casedimensions[1:])
2837 addcases(case + [c], casedimensions[1:])
2829 addcases([], casedimensions)
2838 addcases([], casedimensions)
2830 if case and case in cases:
2839 if case and case in cases:
2831 cases = [case]
2840 cases = [case]
2832 elif case:
2841 elif case:
2833 # Ignore invalid cases
2842 # Ignore invalid cases
2834 cases = []
2843 cases = []
2835 else:
2844 else:
2836 pass
2845 pass
2837 tests += [{'path': t, 'case': c} for c in sorted(cases)]
2846 tests += [{'path': t, 'case': c} for c in sorted(cases)]
2838 else:
2847 else:
2839 tests.append({'path': t})
2848 tests.append({'path': t})
2840 else:
2849 else:
2841 tests.append({'path': t})
2850 tests.append({'path': t})
2842 return tests
2851 return tests
2843
2852
2844 def _runtests(self, testdescs):
2853 def _runtests(self, testdescs):
2845 def _reloadtest(test, i):
2854 def _reloadtest(test, i):
2846 # convert a test back to its description dict
2855 # convert a test back to its description dict
2847 desc = {'path': test.path}
2856 desc = {'path': test.path}
2848 case = getattr(test, '_case', [])
2857 case = getattr(test, '_case', [])
2849 if case:
2858 if case:
2850 desc['case'] = case
2859 desc['case'] = case
2851 return self._gettest(desc, i)
2860 return self._gettest(desc, i)
2852
2861
2853 try:
2862 try:
2854 if self.options.restart:
2863 if self.options.restart:
2855 orig = list(testdescs)
2864 orig = list(testdescs)
2856 while testdescs:
2865 while testdescs:
2857 desc = testdescs[0]
2866 desc = testdescs[0]
2858 # desc['path'] is a relative path
2867 # desc['path'] is a relative path
2859 if 'case' in desc:
2868 if 'case' in desc:
2860 casestr = b'#'.join(desc['case'])
2869 casestr = b'#'.join(desc['case'])
2861 errpath = b'%s#%s.err' % (desc['path'], casestr)
2870 errpath = b'%s#%s.err' % (desc['path'], casestr)
2862 else:
2871 else:
2863 errpath = b'%s.err' % desc['path']
2872 errpath = b'%s.err' % desc['path']
2864 errpath = os.path.join(self._outputdir, errpath)
2873 errpath = os.path.join(self._outputdir, errpath)
2865 if os.path.exists(errpath):
2874 if os.path.exists(errpath):
2866 break
2875 break
2867 testdescs.pop(0)
2876 testdescs.pop(0)
2868 if not testdescs:
2877 if not testdescs:
2869 print("running all tests")
2878 print("running all tests")
2870 testdescs = orig
2879 testdescs = orig
2871
2880
2872 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
2881 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
2873 num_tests = len(tests) * self.options.runs_per_test
2882 num_tests = len(tests) * self.options.runs_per_test
2874
2883
2875 jobs = min(num_tests, self.options.jobs)
2884 jobs = min(num_tests, self.options.jobs)
2876
2885
2877 failed = False
2886 failed = False
2878 kws = self.options.keywords
2887 kws = self.options.keywords
2879 if kws is not None and PYTHON3:
2888 if kws is not None and PYTHON3:
2880 kws = kws.encode('utf-8')
2889 kws = kws.encode('utf-8')
2881
2890
2882 suite = TestSuite(self._testdir,
2891 suite = TestSuite(self._testdir,
2883 jobs=jobs,
2892 jobs=jobs,
2884 whitelist=self.options.whitelisted,
2893 whitelist=self.options.whitelisted,
2885 blacklist=self.options.blacklist,
2894 blacklist=self.options.blacklist,
2886 retest=self.options.retest,
2895 retest=self.options.retest,
2887 keywords=kws,
2896 keywords=kws,
2888 loop=self.options.loop,
2897 loop=self.options.loop,
2889 runs_per_test=self.options.runs_per_test,
2898 runs_per_test=self.options.runs_per_test,
2890 showchannels=self.options.showchannels,
2899 showchannels=self.options.showchannels,
2891 tests=tests, loadtest=_reloadtest)
2900 tests=tests, loadtest=_reloadtest)
2892 verbosity = 1
2901 verbosity = 1
2893 if self.options.list_tests:
2902 if self.options.list_tests:
2894 verbosity = 0
2903 verbosity = 0
2895 elif self.options.verbose:
2904 elif self.options.verbose:
2896 verbosity = 2
2905 verbosity = 2
2897 runner = TextTestRunner(self, verbosity=verbosity)
2906 runner = TextTestRunner(self, verbosity=verbosity)
2898
2907
2899 if self.options.list_tests:
2908 if self.options.list_tests:
2900 result = runner.listtests(suite)
2909 result = runner.listtests(suite)
2901 else:
2910 else:
2902 if self._installdir:
2911 if self._installdir:
2903 self._installhg()
2912 self._installhg()
2904 self._checkhglib("Testing")
2913 self._checkhglib("Testing")
2905 else:
2914 else:
2906 self._usecorrectpython()
2915 self._usecorrectpython()
2907 if self.options.chg:
2916 if self.options.chg:
2908 assert self._installdir
2917 assert self._installdir
2909 self._installchg()
2918 self._installchg()
2910
2919
2911 log('running %d tests using %d parallel processes' % (
2920 log('running %d tests using %d parallel processes' % (
2912 num_tests, jobs))
2921 num_tests, jobs))
2913
2922
2914 result = runner.run(suite)
2923 result = runner.run(suite)
2915
2924
2916 if result.failures or result.errors:
2925 if result.failures or result.errors:
2917 failed = True
2926 failed = True
2918
2927
2919 result.onEnd()
2928 result.onEnd()
2920
2929
2921 if self.options.anycoverage:
2930 if self.options.anycoverage:
2922 self._outputcoverage()
2931 self._outputcoverage()
2923 except KeyboardInterrupt:
2932 except KeyboardInterrupt:
2924 failed = True
2933 failed = True
2925 print("\ninterrupted!")
2934 print("\ninterrupted!")
2926
2935
2927 if failed:
2936 if failed:
2928 return 1
2937 return 1
2929
2938
2930 def _getport(self, count):
2939 def _getport(self, count):
2931 port = self._ports.get(count) # do we have a cached entry?
2940 port = self._ports.get(count) # do we have a cached entry?
2932 if port is None:
2941 if port is None:
2933 portneeded = 3
2942 portneeded = 3
2934 # above 100 tries we just give up and let test reports failure
2943 # above 100 tries we just give up and let test reports failure
2935 for tries in xrange(100):
2944 for tries in xrange(100):
2936 allfree = True
2945 allfree = True
2937 port = self.options.port + self._portoffset
2946 port = self.options.port + self._portoffset
2938 for idx in xrange(portneeded):
2947 for idx in xrange(portneeded):
2939 if not checkportisavailable(port + idx):
2948 if not checkportisavailable(port + idx):
2940 allfree = False
2949 allfree = False
2941 break
2950 break
2942 self._portoffset += portneeded
2951 self._portoffset += portneeded
2943 if allfree:
2952 if allfree:
2944 break
2953 break
2945 self._ports[count] = port
2954 self._ports[count] = port
2946 return port
2955 return port
2947
2956
2948 def _gettest(self, testdesc, count):
2957 def _gettest(self, testdesc, count):
2949 """Obtain a Test by looking at its filename.
2958 """Obtain a Test by looking at its filename.
2950
2959
2951 Returns a Test instance. The Test may not be runnable if it doesn't
2960 Returns a Test instance. The Test may not be runnable if it doesn't
2952 map to a known type.
2961 map to a known type.
2953 """
2962 """
2954 path = testdesc['path']
2963 path = testdesc['path']
2955 lctest = path.lower()
2964 lctest = path.lower()
2956 testcls = Test
2965 testcls = Test
2957
2966
2958 for ext, cls in self.TESTTYPES:
2967 for ext, cls in self.TESTTYPES:
2959 if lctest.endswith(ext):
2968 if lctest.endswith(ext):
2960 testcls = cls
2969 testcls = cls
2961 break
2970 break
2962
2971
2963 refpath = os.path.join(getcwdb(), path)
2972 refpath = os.path.join(getcwdb(), path)
2964 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2973 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2965
2974
2966 # extra keyword parameters. 'case' is used by .t tests
2975 # extra keyword parameters. 'case' is used by .t tests
2967 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
2976 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
2968
2977
2969 t = testcls(refpath, self._outputdir, tmpdir,
2978 t = testcls(refpath, self._outputdir, tmpdir,
2970 keeptmpdir=self.options.keep_tmpdir,
2979 keeptmpdir=self.options.keep_tmpdir,
2971 debug=self.options.debug,
2980 debug=self.options.debug,
2972 first=self.options.first,
2981 first=self.options.first,
2973 timeout=self.options.timeout,
2982 timeout=self.options.timeout,
2974 startport=self._getport(count),
2983 startport=self._getport(count),
2975 extraconfigopts=self.options.extra_config_opt,
2984 extraconfigopts=self.options.extra_config_opt,
2976 py3warnings=self.options.py3_warnings,
2985 py3warnings=self.options.py3_warnings,
2977 shell=self.options.shell,
2986 shell=self.options.shell,
2978 hgcommand=self._hgcommand,
2987 hgcommand=self._hgcommand,
2979 usechg=bool(self.options.with_chg or self.options.chg),
2988 usechg=bool(self.options.with_chg or self.options.chg),
2980 useipv6=useipv6, **kwds)
2989 useipv6=useipv6, **kwds)
2981 t.should_reload = True
2990 t.should_reload = True
2982 return t
2991 return t
2983
2992
2984 def _cleanup(self):
2993 def _cleanup(self):
2985 """Clean up state from this test invocation."""
2994 """Clean up state from this test invocation."""
2986 if self.options.keep_tmpdir:
2995 if self.options.keep_tmpdir:
2987 return
2996 return
2988
2997
2989 vlog("# Cleaning up HGTMP", self._hgtmp)
2998 vlog("# Cleaning up HGTMP", self._hgtmp)
2990 shutil.rmtree(self._hgtmp, True)
2999 shutil.rmtree(self._hgtmp, True)
2991 for f in self._createdfiles:
3000 for f in self._createdfiles:
2992 try:
3001 try:
2993 os.remove(f)
3002 os.remove(f)
2994 except OSError:
3003 except OSError:
2995 pass
3004 pass
2996
3005
2997 def _usecorrectpython(self):
3006 def _usecorrectpython(self):
2998 """Configure the environment to use the appropriate Python in tests."""
3007 """Configure the environment to use the appropriate Python in tests."""
2999 # Tests must use the same interpreter as us or bad things will happen.
3008 # Tests must use the same interpreter as us or bad things will happen.
3000 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
3009 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
3001
3010
3002 # os.symlink() is a thing with py3 on Windows, but it requires
3011 # os.symlink() is a thing with py3 on Windows, but it requires
3003 # Administrator rights.
3012 # Administrator rights.
3004 if getattr(os, 'symlink', None) and os.name != 'nt':
3013 if getattr(os, 'symlink', None) and os.name != 'nt':
3005 vlog("# Making python executable in test path a symlink to '%s'" %
3014 vlog("# Making python executable in test path a symlink to '%s'" %
3006 sys.executable)
3015 sysexecutable)
3007 mypython = os.path.join(self._tmpbindir, pyexename)
3016 mypython = os.path.join(self._tmpbindir, pyexename)
3008 try:
3017 try:
3009 if os.readlink(mypython) == sys.executable:
3018 if os.readlink(mypython) == sysexecutable:
3010 return
3019 return
3011 os.unlink(mypython)
3020 os.unlink(mypython)
3012 except OSError as err:
3021 except OSError as err:
3013 if err.errno != errno.ENOENT:
3022 if err.errno != errno.ENOENT:
3014 raise
3023 raise
3015 if self._findprogram(pyexename) != sys.executable:
3024 if self._findprogram(pyexename) != sysexecutable:
3016 try:
3025 try:
3017 os.symlink(sys.executable, mypython)
3026 os.symlink(sysexecutable, mypython)
3018 self._createdfiles.append(mypython)
3027 self._createdfiles.append(mypython)
3019 except OSError as err:
3028 except OSError as err:
3020 # child processes may race, which is harmless
3029 # child processes may race, which is harmless
3021 if err.errno != errno.EEXIST:
3030 if err.errno != errno.EEXIST:
3022 raise
3031 raise
3023 else:
3032 else:
3024 exedir, exename = os.path.split(sys.executable)
3033 exedir, exename = os.path.split(sysexecutable)
3025 vlog("# Modifying search path to find %s as %s in '%s'" %
3034 vlog("# Modifying search path to find %s as %s in '%s'" %
3026 (exename, pyexename, exedir))
3035 (exename, pyexename, exedir))
3027 path = os.environ['PATH'].split(os.pathsep)
3036 path = os.environ['PATH'].split(os.pathsep)
3028 while exedir in path:
3037 while exedir in path:
3029 path.remove(exedir)
3038 path.remove(exedir)
3030 os.environ['PATH'] = os.pathsep.join([exedir] + path)
3039 os.environ['PATH'] = os.pathsep.join([exedir] + path)
3031 if not self._findprogram(pyexename):
3040 if not self._findprogram(pyexename):
3032 print("WARNING: Cannot find %s in search path" % pyexename)
3041 print("WARNING: Cannot find %s in search path" % pyexename)
3033
3042
3034 def _installhg(self):
3043 def _installhg(self):
3035 """Install hg into the test environment.
3044 """Install hg into the test environment.
3036
3045
3037 This will also configure hg with the appropriate testing settings.
3046 This will also configure hg with the appropriate testing settings.
3038 """
3047 """
3039 vlog("# Performing temporary installation of HG")
3048 vlog("# Performing temporary installation of HG")
3040 installerrs = os.path.join(self._hgtmp, b"install.err")
3049 installerrs = os.path.join(self._hgtmp, b"install.err")
3041 compiler = ''
3050 compiler = ''
3042 if self.options.compiler:
3051 if self.options.compiler:
3043 compiler = '--compiler ' + self.options.compiler
3052 compiler = '--compiler ' + self.options.compiler
3044 if self.options.pure:
3053 if self.options.pure:
3045 pure = b"--pure"
3054 pure = b"--pure"
3046 else:
3055 else:
3047 pure = b""
3056 pure = b""
3048
3057
3049 # Run installer in hg root
3058 # Run installer in hg root
3050 script = os.path.realpath(sys.argv[0])
3059 script = os.path.realpath(sys.argv[0])
3051 exe = sys.executable
3060 exe = sysexecutable
3052 if PYTHON3:
3061 if PYTHON3:
3053 compiler = _bytespath(compiler)
3062 compiler = _bytespath(compiler)
3054 script = _bytespath(script)
3063 script = _bytespath(script)
3055 exe = _bytespath(exe)
3064 exe = _bytespath(exe)
3056 hgroot = os.path.dirname(os.path.dirname(script))
3065 hgroot = os.path.dirname(os.path.dirname(script))
3057 self._hgroot = hgroot
3066 self._hgroot = hgroot
3058 os.chdir(hgroot)
3067 os.chdir(hgroot)
3059 nohome = b'--home=""'
3068 nohome = b'--home=""'
3060 if os.name == 'nt':
3069 if os.name == 'nt':
3061 # The --home="" trick works only on OS where os.sep == '/'
3070 # The --home="" trick works only on OS where os.sep == '/'
3062 # because of a distutils convert_path() fast-path. Avoid it at
3071 # because of a distutils convert_path() fast-path. Avoid it at
3063 # least on Windows for now, deal with .pydistutils.cfg bugs
3072 # least on Windows for now, deal with .pydistutils.cfg bugs
3064 # when they happen.
3073 # when they happen.
3065 nohome = b''
3074 nohome = b''
3066 cmd = (b'"%(exe)s" setup.py %(pure)s clean --all'
3075 cmd = (b'"%(exe)s" setup.py %(pure)s clean --all'
3067 b' build %(compiler)s --build-base="%(base)s"'
3076 b' build %(compiler)s --build-base="%(base)s"'
3068 b' install --force --prefix="%(prefix)s"'
3077 b' install --force --prefix="%(prefix)s"'
3069 b' --install-lib="%(libdir)s"'
3078 b' --install-lib="%(libdir)s"'
3070 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
3079 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
3071 % {b'exe': exe, b'pure': pure,
3080 % {b'exe': exe, b'pure': pure,
3072 b'compiler': compiler,
3081 b'compiler': compiler,
3073 b'base': os.path.join(self._hgtmp, b"build"),
3082 b'base': os.path.join(self._hgtmp, b"build"),
3074 b'prefix': self._installdir, b'libdir': self._pythondir,
3083 b'prefix': self._installdir, b'libdir': self._pythondir,
3075 b'bindir': self._bindir,
3084 b'bindir': self._bindir,
3076 b'nohome': nohome, b'logfile': installerrs})
3085 b'nohome': nohome, b'logfile': installerrs})
3077
3086
3078 # setuptools requires install directories to exist.
3087 # setuptools requires install directories to exist.
3079 def makedirs(p):
3088 def makedirs(p):
3080 try:
3089 try:
3081 os.makedirs(p)
3090 os.makedirs(p)
3082 except OSError as e:
3091 except OSError as e:
3083 if e.errno != errno.EEXIST:
3092 if e.errno != errno.EEXIST:
3084 raise
3093 raise
3085 makedirs(self._pythondir)
3094 makedirs(self._pythondir)
3086 makedirs(self._bindir)
3095 makedirs(self._bindir)
3087
3096
3088 vlog("# Running", cmd)
3097 vlog("# Running", cmd)
3089 if subprocess.call(_strpath(cmd), shell=True) == 0:
3098 if subprocess.call(_strpath(cmd), shell=True) == 0:
3090 if not self.options.verbose:
3099 if not self.options.verbose:
3091 try:
3100 try:
3092 os.remove(installerrs)
3101 os.remove(installerrs)
3093 except OSError as e:
3102 except OSError as e:
3094 if e.errno != errno.ENOENT:
3103 if e.errno != errno.ENOENT:
3095 raise
3104 raise
3096 else:
3105 else:
3097 with open(installerrs, 'rb') as f:
3106 with open(installerrs, 'rb') as f:
3098 for line in f:
3107 for line in f:
3099 if PYTHON3:
3108 if PYTHON3:
3100 sys.stdout.buffer.write(line)
3109 sys.stdout.buffer.write(line)
3101 else:
3110 else:
3102 sys.stdout.write(line)
3111 sys.stdout.write(line)
3103 sys.exit(1)
3112 sys.exit(1)
3104 os.chdir(self._testdir)
3113 os.chdir(self._testdir)
3105
3114
3106 self._usecorrectpython()
3115 self._usecorrectpython()
3107
3116
3108 if self.options.py3_warnings and not self.options.anycoverage:
3117 if self.options.py3_warnings and not self.options.anycoverage:
3109 vlog("# Updating hg command to enable Py3k Warnings switch")
3118 vlog("# Updating hg command to enable Py3k Warnings switch")
3110 with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
3119 with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
3111 lines = [line.rstrip() for line in f]
3120 lines = [line.rstrip() for line in f]
3112 lines[0] += ' -3'
3121 lines[0] += ' -3'
3113 with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
3122 with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
3114 for line in lines:
3123 for line in lines:
3115 f.write(line + '\n')
3124 f.write(line + '\n')
3116
3125
3117 hgbat = os.path.join(self._bindir, b'hg.bat')
3126 hgbat = os.path.join(self._bindir, b'hg.bat')
3118 if os.path.isfile(hgbat):
3127 if os.path.isfile(hgbat):
3119 # hg.bat expects to be put in bin/scripts while run-tests.py
3128 # hg.bat expects to be put in bin/scripts while run-tests.py
3120 # installation layout put it in bin/ directly. Fix it
3129 # installation layout put it in bin/ directly. Fix it
3121 with open(hgbat, 'rb') as f:
3130 with open(hgbat, 'rb') as f:
3122 data = f.read()
3131 data = f.read()
3123 if br'"%~dp0..\python" "%~dp0hg" %*' in data:
3132 if br'"%~dp0..\python" "%~dp0hg" %*' in data:
3124 data = data.replace(br'"%~dp0..\python" "%~dp0hg" %*',
3133 data = data.replace(br'"%~dp0..\python" "%~dp0hg" %*',
3125 b'"%~dp0python" "%~dp0hg" %*')
3134 b'"%~dp0python" "%~dp0hg" %*')
3126 with open(hgbat, 'wb') as f:
3135 with open(hgbat, 'wb') as f:
3127 f.write(data)
3136 f.write(data)
3128 else:
3137 else:
3129 print('WARNING: cannot fix hg.bat reference to python.exe')
3138 print('WARNING: cannot fix hg.bat reference to python.exe')
3130
3139
3131 if self.options.anycoverage:
3140 if self.options.anycoverage:
3132 custom = os.path.join(self._testdir, 'sitecustomize.py')
3141 custom = os.path.join(self._testdir, 'sitecustomize.py')
3133 target = os.path.join(self._pythondir, 'sitecustomize.py')
3142 target = os.path.join(self._pythondir, 'sitecustomize.py')
3134 vlog('# Installing coverage trigger to %s' % target)
3143 vlog('# Installing coverage trigger to %s' % target)
3135 shutil.copyfile(custom, target)
3144 shutil.copyfile(custom, target)
3136 rc = os.path.join(self._testdir, '.coveragerc')
3145 rc = os.path.join(self._testdir, '.coveragerc')
3137 vlog('# Installing coverage rc to %s' % rc)
3146 vlog('# Installing coverage rc to %s' % rc)
3138 os.environ['COVERAGE_PROCESS_START'] = rc
3147 os.environ['COVERAGE_PROCESS_START'] = rc
3139 covdir = os.path.join(self._installdir, '..', 'coverage')
3148 covdir = os.path.join(self._installdir, '..', 'coverage')
3140 try:
3149 try:
3141 os.mkdir(covdir)
3150 os.mkdir(covdir)
3142 except OSError as e:
3151 except OSError as e:
3143 if e.errno != errno.EEXIST:
3152 if e.errno != errno.EEXIST:
3144 raise
3153 raise
3145
3154
3146 os.environ['COVERAGE_DIR'] = covdir
3155 os.environ['COVERAGE_DIR'] = covdir
3147
3156
3148 def _checkhglib(self, verb):
3157 def _checkhglib(self, verb):
3149 """Ensure that the 'mercurial' package imported by python is
3158 """Ensure that the 'mercurial' package imported by python is
3150 the one we expect it to be. If not, print a warning to stderr."""
3159 the one we expect it to be. If not, print a warning to stderr."""
3151 if ((self._bindir == self._pythondir) and
3160 if ((self._bindir == self._pythondir) and
3152 (self._bindir != self._tmpbindir)):
3161 (self._bindir != self._tmpbindir)):
3153 # The pythondir has been inferred from --with-hg flag.
3162 # The pythondir has been inferred from --with-hg flag.
3154 # We cannot expect anything sensible here.
3163 # We cannot expect anything sensible here.
3155 return
3164 return
3156 expecthg = os.path.join(self._pythondir, b'mercurial')
3165 expecthg = os.path.join(self._pythondir, b'mercurial')
3157 actualhg = self._gethgpath()
3166 actualhg = self._gethgpath()
3158 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3167 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3159 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
3168 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
3160 ' (expected %s)\n'
3169 ' (expected %s)\n'
3161 % (verb, actualhg, expecthg))
3170 % (verb, actualhg, expecthg))
3162 def _gethgpath(self):
3171 def _gethgpath(self):
3163 """Return the path to the mercurial package that is actually found by
3172 """Return the path to the mercurial package that is actually found by
3164 the current Python interpreter."""
3173 the current Python interpreter."""
3165 if self._hgpath is not None:
3174 if self._hgpath is not None:
3166 return self._hgpath
3175 return self._hgpath
3167
3176
3168 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3177 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3169 cmd = cmd % PYTHON
3178 cmd = cmd % PYTHON
3170 if PYTHON3:
3179 if PYTHON3:
3171 cmd = _strpath(cmd)
3180 cmd = _strpath(cmd)
3172
3181
3173 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3182 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3174 out, err = p.communicate()
3183 out, err = p.communicate()
3175
3184
3176 self._hgpath = out.strip()
3185 self._hgpath = out.strip()
3177
3186
3178 return self._hgpath
3187 return self._hgpath
3179
3188
3180 def _installchg(self):
3189 def _installchg(self):
3181 """Install chg into the test environment"""
3190 """Install chg into the test environment"""
3182 vlog('# Performing temporary installation of CHG')
3191 vlog('# Performing temporary installation of CHG')
3183 assert os.path.dirname(self._bindir) == self._installdir
3192 assert os.path.dirname(self._bindir) == self._installdir
3184 assert self._hgroot, 'must be called after _installhg()'
3193 assert self._hgroot, 'must be called after _installhg()'
3185 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
3194 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
3186 % {b'make': b'make', # TODO: switch by option or environment?
3195 % {b'make': b'make', # TODO: switch by option or environment?
3187 b'prefix': self._installdir})
3196 b'prefix': self._installdir})
3188 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3197 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3189 vlog("# Running", cmd)
3198 vlog("# Running", cmd)
3190 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
3199 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
3191 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
3200 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
3192 stderr=subprocess.STDOUT)
3201 stderr=subprocess.STDOUT)
3193 out, _err = proc.communicate()
3202 out, _err = proc.communicate()
3194 if proc.returncode != 0:
3203 if proc.returncode != 0:
3195 if PYTHON3:
3204 if PYTHON3:
3196 sys.stdout.buffer.write(out)
3205 sys.stdout.buffer.write(out)
3197 else:
3206 else:
3198 sys.stdout.write(out)
3207 sys.stdout.write(out)
3199 sys.exit(1)
3208 sys.exit(1)
3200
3209
3201 def _outputcoverage(self):
3210 def _outputcoverage(self):
3202 """Produce code coverage output."""
3211 """Produce code coverage output."""
3203 import coverage
3212 import coverage
3204 coverage = coverage.coverage
3213 coverage = coverage.coverage
3205
3214
3206 vlog('# Producing coverage report')
3215 vlog('# Producing coverage report')
3207 # chdir is the easiest way to get short, relative paths in the
3216 # chdir is the easiest way to get short, relative paths in the
3208 # output.
3217 # output.
3209 os.chdir(self._hgroot)
3218 os.chdir(self._hgroot)
3210 covdir = os.path.join(self._installdir, '..', 'coverage')
3219 covdir = os.path.join(self._installdir, '..', 'coverage')
3211 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3220 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3212
3221
3213 # Map install directory paths back to source directory.
3222 # Map install directory paths back to source directory.
3214 cov.config.paths['srcdir'] = ['.', self._pythondir]
3223 cov.config.paths['srcdir'] = ['.', self._pythondir]
3215
3224
3216 cov.combine()
3225 cov.combine()
3217
3226
3218 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
3227 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
3219 cov.report(ignore_errors=True, omit=omit)
3228 cov.report(ignore_errors=True, omit=omit)
3220
3229
3221 if self.options.htmlcov:
3230 if self.options.htmlcov:
3222 htmldir = os.path.join(self._outputdir, 'htmlcov')
3231 htmldir = os.path.join(self._outputdir, 'htmlcov')
3223 cov.html_report(directory=htmldir, omit=omit)
3232 cov.html_report(directory=htmldir, omit=omit)
3224 if self.options.annotate:
3233 if self.options.annotate:
3225 adir = os.path.join(self._outputdir, 'annotated')
3234 adir = os.path.join(self._outputdir, 'annotated')
3226 if not os.path.isdir(adir):
3235 if not os.path.isdir(adir):
3227 os.mkdir(adir)
3236 os.mkdir(adir)
3228 cov.annotate(directory=adir, omit=omit)
3237 cov.annotate(directory=adir, omit=omit)
3229
3238
3230 def _findprogram(self, program):
3239 def _findprogram(self, program):
3231 """Search PATH for a executable program"""
3240 """Search PATH for a executable program"""
3232 dpb = _bytespath(os.defpath)
3241 dpb = _bytespath(os.defpath)
3233 sepb = _bytespath(os.pathsep)
3242 sepb = _bytespath(os.pathsep)
3234 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3243 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3235 name = os.path.join(p, program)
3244 name = os.path.join(p, program)
3236 if os.name == 'nt' or os.access(name, os.X_OK):
3245 if os.name == 'nt' or os.access(name, os.X_OK):
3237 return name
3246 return name
3238 return None
3247 return None
3239
3248
3240 def _checktools(self):
3249 def _checktools(self):
3241 """Ensure tools required to run tests are present."""
3250 """Ensure tools required to run tests are present."""
3242 for p in self.REQUIREDTOOLS:
3251 for p in self.REQUIREDTOOLS:
3243 if os.name == 'nt' and not p.endswith(b'.exe'):
3252 if os.name == 'nt' and not p.endswith(b'.exe'):
3244 p += b'.exe'
3253 p += b'.exe'
3245 found = self._findprogram(p)
3254 found = self._findprogram(p)
3246 if found:
3255 if found:
3247 vlog("# Found prerequisite", p, "at", found)
3256 vlog("# Found prerequisite", p, "at", found)
3248 else:
3257 else:
3249 print("WARNING: Did not find prerequisite tool: %s " %
3258 print("WARNING: Did not find prerequisite tool: %s " %
3250 p.decode("utf-8"))
3259 p.decode("utf-8"))
3251
3260
def aggregateexceptions(path):
    """Aggregate exception reports dumped under ``path``.

    Each file in ``path`` holds 5 NUL-separated fields:
    exception, main frame, hg frame, hg line, test name.
    Malformed files (wrong field count) are silently skipped.

    Returns a dict with per-failure counts, failure<->test mappings,
    the least-failing test per failure, and a combined sortable summary.
    """
    exceptioncounts = collections.Counter()
    testsbyfailure = collections.defaultdict(set)
    failuresbytest = collections.defaultdict(set)

    for entry in os.listdir(path):
        with open(os.path.join(path, entry), 'rb') as fh:
            fields = fh.read().split(b'\0')
        if len(fields) != 5:
            continue

        exc, mainframe, hgframe, hgline, testname = (
            field.decode('utf-8') for field in fields)

        key = (hgframe, hgline, exc)
        exceptioncounts[key] += 1
        testsbyfailure[key].add(testname)
        failuresbytest[testname].add(key)

    # For each failure, find the test exhibiting the fewest distinct
    # failures -- the best candidate for reproducing just this one.
    # min() is stable, so ties resolve to the alphabetically first test.
    leastfailing = {}
    for key, tests in testsbyfailure.items():
        fewesttest = min(sorted(tests), key=lambda t: len(failuresbytest[t]))
        leastfailing[key] = (len(failuresbytest[fewesttest]), fewesttest)

    # Combined summary so callers can sort by total occurrences and
    # impacted tests.
    combined = {
        key: (exceptioncounts[key],
              len(testsbyfailure[key]),
              leastfailing[key][0],
              leastfailing[key][1])
        for key in exceptioncounts
    }

    return {
        'exceptioncounts': exceptioncounts,
        'total': sum(exceptioncounts.values()),
        'combined': combined,
        'leastfailing': leastfailing,
        'byfailure': testsbyfailure,
        'bytest': failuresbytest,
    }
3304
3313
if __name__ == '__main__':
    runner = TestRunner()

    # On Windows, switch the standard streams to binary mode so test
    # output is not mangled by newline translation; elsewhere msvcrt
    # simply does not exist and the setup is skipped.
    try:
        import msvcrt
        for stream in (sys.stdin, sys.stdout, sys.stderr):
            msvcrt.setmode(stream.fileno(), os.O_BINARY)
    except ImportError:
        pass

    sys.exit(runner.run(sys.argv[1:]))
General Comments 0
You need to be logged in to leave comments. Login now