formatting: run black version 19.10b0 on the codebase...
marmoute
r43663:6ada8a27 stable
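
This commit is a mechanical reformatting: the three file diffs below come from running black over the tree. A minimal sketch of such a run (hedged: the exact flags and file list are assumptions, not taken from the commit itself; --skip-string-normalization is consistent with the single-quoted b'...' literals that survive unchanged in the diffs):

    $ pip install black==19.10b0
    $ black --skip-string-normalization hgext/gpg.py mercurial/config.py mercurial/context.py
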
hgext/gpg.py
@@ -1,382 +1,381 @@
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''commands to sign and verify changesets'''

from __future__ import absolute_import

import binascii
import os

from mercurial.i18n import _
from mercurial import (
    cmdutil,
    error,
    help,
    match,
    node as hgnode,
    pycompat,
    registrar,
)
from mercurial.utils import (
    dateutil,
    procutil,
)

cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b'ships-with-hg-core'

configtable = {}
configitem = registrar.configitem(configtable)

configitem(
    b'gpg', b'cmd', default=b'gpg',
)
configitem(
    b'gpg', b'key', default=None,
)
configitem(
    b'gpg', b'.*', default=None, generic=True,
)

# Custom help category
_HELP_CATEGORY = b'gpg'
help.CATEGORY_ORDER.insert(
    help.CATEGORY_ORDER.index(registrar.command.CATEGORY_HELP), _HELP_CATEGORY
)
help.CATEGORY_NAMES[_HELP_CATEGORY] = b'Signing changes (GPG)'


class gpg(object):
    def __init__(self, path, key=None):
        self.path = path
        self.key = (key and b" --local-user \"%s\"" % key) or b""

    def sign(self, data):
        gpgcmd = b"%s --sign --detach-sign%s" % (self.path, self.key)
        return procutil.filter(data, gpgcmd)

    def verify(self, data, sig):
67 """ returns of the good and bad signatures"""
67 """ returns of the good and bad signatures"""
        sigfile = datafile = None
        try:
            # create temporary files
            fd, sigfile = pycompat.mkstemp(prefix=b"hg-gpg-", suffix=b".sig")
            fp = os.fdopen(fd, r'wb')
            fp.write(sig)
            fp.close()
            fd, datafile = pycompat.mkstemp(prefix=b"hg-gpg-", suffix=b".txt")
            fp = os.fdopen(fd, r'wb')
            fp.write(data)
            fp.close()
-            gpgcmd = b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\"" % (
-                self.path,
-                sigfile,
-                datafile,
-            )
+            gpgcmd = (
+                b"%s --logger-fd 1 --status-fd 1 --verify \"%s\" \"%s\""
+                % (self.path, sigfile, datafile,)
+            )
            ret = procutil.filter(b"", gpgcmd)
        finally:
            for f in (sigfile, datafile):
                try:
                    if f:
                        os.unlink(f)
                except OSError:
                    pass
        keys = []
        key, fingerprint = None, None
        for l in ret.splitlines():
            # see DETAILS in the gnupg documentation
            # filter the logger output
            if not l.startswith(b"[GNUPG:]"):
                continue
            l = l[9:]
            if l.startswith(b"VALIDSIG"):
                # fingerprint of the primary key
                fingerprint = l.split()[10]
            elif l.startswith(b"ERRSIG"):
                key = l.split(b" ", 3)[:2]
                key.append(b"")
                fingerprint = None
            elif (
                l.startswith(b"GOODSIG")
                or l.startswith(b"EXPSIG")
                or l.startswith(b"EXPKEYSIG")
                or l.startswith(b"BADSIG")
            ):
                if key is not None:
                    keys.append(key + [fingerprint])
                key = l.split(b" ", 2)
                fingerprint = None
        if key is not None:
            keys.append(key + [fingerprint])
        return keys


def newgpg(ui, **opts):
    """create a new gpg instance"""
    gpgpath = ui.config(b"gpg", b"cmd")
    gpgkey = opts.get(r'key')
    if not gpgkey:
        gpgkey = ui.config(b"gpg", b"key")
    return gpg(gpgpath, gpgkey)


def sigwalk(repo):
132 """
131 """
133 walk over every sigs, yields a couple
132 walk over every sigs, yields a couple
134 ((node, version, sig), (filename, linenumber))
133 ((node, version, sig), (filename, linenumber))
135 """
134 """

    def parsefile(fileiter, context):
        ln = 1
        for l in fileiter:
            if not l:
                continue
            yield (l.split(b" ", 2), (context, ln))
            ln += 1

    # read the heads
    fl = repo.file(b".hgsigs")
    for r in reversed(fl.heads()):
        fn = b".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = b"localsigs"
        for item in parsefile(repo.vfs(fn), fn):
            yield item
    except IOError:
        pass


def getkeys(ui, repo, mygpg, sigdata, context):
161 """get the keys who signed a data"""
160 """get the keys who signed a data"""
    fn, ln = context
    node, version, sig = sigdata
    prefix = b"%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    keys = mygpg.verify(data, sig)

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == b"ERRSIG":
            ui.write(_(b"%s Unknown key ID \"%s\"\n") % (prefix, key[1]))
            continue
        if key[0] == b"BADSIG":
            ui.write(_(b"%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == b"EXPSIG":
            ui.write(
                _(b"%s Note: Signature has expired (signed by: \"%s\")\n")
                % (prefix, key[2])
            )
        elif key[0] == b"EXPKEYSIG":
            ui.write(
                _(b"%s Note: This key has expired (signed by: \"%s\")\n")
                % (prefix, key[2])
            )
        validkeys.append((key[1], key[2], key[3]))
    return validkeys


@command(b"sigs", [], _(b'hg sigs'), helpcategory=_HELP_CATEGORY)
def sigs(ui, repo):
    """list signed changesets"""
    mygpg = newgpg(ui)
    revs = {}

    for data, context in sigwalk(repo):
        node, version, sig = data
        fn, ln = context
        try:
            n = repo.lookup(node)
        except KeyError:
            ui.warn(_(b"%s:%d node does not exist\n") % (fn, ln))
            continue
        r = repo.changelog.rev(n)
        keys = getkeys(ui, repo, mygpg, data, context)
        if not keys:
            continue
        revs.setdefault(r, [])
        revs[r].extend(keys)
    for rev in sorted(revs, reverse=True):
        for k in revs[rev]:
            r = b"%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
            ui.write(b"%-30s %s\n" % (keystr(ui, k), r))


@command(b"sigcheck", [], _(b'hg sigcheck REV'), helpcategory=_HELP_CATEGORY)
def sigcheck(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    for data, context in sigwalk(repo):
        node, version, sig = data
        if node == hexrev:
            k = getkeys(ui, repo, mygpg, data, context)
            if k:
                keys.extend(k)

    if not keys:
        ui.write(_(b"no valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write(_(b"%s is signed by:\n") % hgnode.short(rev))
    for key in keys:
        ui.write(b" %s\n" % keystr(ui, key))


def keystr(ui, key):
    """associate a string to a key (username, comment)"""
    keyid, user, fingerprint = key
    comment = ui.config(b"gpg", fingerprint)
    if comment:
        return b"%s (%s)" % (user, comment)
    else:
        return user


@command(
    b"sign",
    [
        (b'l', b'local', None, _(b'make the signature local')),
        (b'f', b'force', None, _(b'sign even if the sigfile is modified')),
        (
            b'',
            b'no-commit',
            None,
            _(b'do not commit the sigfile after signing'),
        ),
        (b'k', b'key', b'', _(b'the key id to sign with'), _(b'ID')),
        (b'm', b'message', b'', _(b'use text as commit message'), _(b'TEXT')),
        (b'e', b'edit', False, _(b'invoke editor on commit messages')),
    ]
    + cmdutil.commitopts2,
    _(b'hg sign [OPTION]... [REV]...'),
    helpcategory=_HELP_CATEGORY,
)
def sign(ui, repo, *revs, **opts):
    """add a signature for the current or given revision

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    The ``gpg.cmd`` config setting can be used to specify the command
    to run. A default key can be specified with ``gpg.key``.

    See :hg:`help dates` for a list of formats valid for -d/--date.
    """
    with repo.wlock():
        return _dosign(ui, repo, *revs, **opts)


def _dosign(ui, repo, *revs, **opts):
    mygpg = newgpg(ui, **opts)
    opts = pycompat.byteskwargs(opts)
    sigver = b"0"
    sigmessage = b""

    date = opts.get(b'date')
    if date:
        opts[b'date'] = dateutil.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [
            node for node in repo.dirstate.parents() if node != hgnode.nullid
        ]
        if len(nodes) > 1:
            raise error.Abort(
                _(b'uncommitted merge - please provide a specific revision')
            )
        if not nodes:
            nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(
            _(b"signing %d:%s\n") % (repo.changelog.rev(n), hgnode.short(n))
        )
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise error.Abort(_(b"error while signing"))
        sig = binascii.b2a_base64(sig)
        sig = sig.replace(b"\n", b"")
        sigmessage += b"%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts[b'local']:
        repo.vfs.append(b"localsigs", sigmessage)
        return

    if not opts[b"force"]:
        msigs = match.exact([b'.hgsigs'])
        if any(repo.status(match=msigs, unknown=True, ignored=True)):
            raise error.Abort(
                _(b"working copy of .hgsigs is changed "),
                hint=_(b"please commit .hgsigs manually"),
            )

    sigsfile = repo.wvfs(b".hgsigs", b"ab")
    sigsfile.write(sigmessage)
    sigsfile.close()

    if b'.hgsigs' not in repo.dirstate:
        repo[None].add([b".hgsigs"])

    if opts[b"no_commit"]:
        return

    message = opts[b'message']
    if not message:
        # we don't translate commit messages
        message = b"\n".join(
            [
                b"Added signature for changeset %s" % hgnode.short(n)
                for n in nodes
            ]
        )
    try:
        editor = cmdutil.getcommiteditor(
            editform=b'gpg.sign', **pycompat.strkwargs(opts)
        )
        repo.commit(
            message, opts[b'user'], opts[b'date'], match=msigs, editor=editor
        )
    except ValueError as inst:
        raise error.Abort(pycompat.bytestr(inst))


def node2txt(repo, node, ver):
    """map a manifest into some text"""
    if ver == b"0":
        return b"%s\n" % hgnode.hex(node)
    else:
        raise error.Abort(_(b"unknown signature version"))


def extsetup(ui):
    # Add our category before "Repository maintenance".
    help.CATEGORY_ORDER.insert(
        help.CATEGORY_ORDER.index(command.CATEGORY_MAINTENANCE), _HELP_CATEGORY
    )
    help.CATEGORY_NAMES[_HELP_CATEGORY] = b'GPG signing'
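
The only substantive hunk in the file above is the re-wrapped gpgcmd assignment: black 19.10b0 wraps the whole expression in parentheses and breaks before the % operator instead of exploding the right-hand tuple one element per line. The same transformation on a stripped-down example (hypothetical names, for illustration only):

    # before: the % operator stays on the first line, the tuple is exploded
    cmd = b"%s --verify \"%s\" \"%s\"" % (
        path,
        sigfile,
        datafile,
    )

    # after black 19.10b0: one parenthesized expression, broken before `%`
    cmd = (
        b"%s --verify \"%s\" \"%s\""
        % (path, sigfile, datafile,)
    )
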
mercurial/config.py
@@ -1,313 +1,311 @@
# config.py - configuration parsing for Mercurial
#
# Copyright 2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import os

from .i18n import _
from .pycompat import getattr
from . import (
    encoding,
    error,
    pycompat,
    util,
)


class config(object):
    def __init__(self, data=None, includepaths=None):
        self._data = {}
        self._unset = []
        self._includepaths = includepaths or []
        if data:
            for k in data._data:
                self._data[k] = data[k].copy()
            self._source = data._source.copy()
        else:
            self._source = util.cowdict()

    def copy(self):
        return config(self)

    def __contains__(self, section):
        return section in self._data

    def hasitem(self, section, item):
        return item in self._data.get(section, {})

    def __getitem__(self, section):
        return self._data.get(section, {})

    def __iter__(self):
        for d in self.sections():
            yield d

    def update(self, src):
        self._source = self._source.preparewrite()
        for s, n in src._unset:
            ds = self._data.get(s, None)
            if ds is not None and n in ds:
                self._data[s] = ds.preparewrite()
                del self._data[s][n]
                del self._source[(s, n)]
        for s in src:
            ds = self._data.get(s, None)
            if ds:
                self._data[s] = ds.preparewrite()
            else:
                self._data[s] = util.cowsortdict()
            self._data[s].update(src._data[s])
        self._source.update(src._source)

    def get(self, section, item, default=None):
        return self._data.get(section, {}).get(item, default)

    def backup(self, section, item):
        """return a tuple allowing restore to reinstall a previous value

        The main reason we need it is because it handles the "no data" case.
        """
        try:
            value = self._data[section][item]
            source = self.source(section, item)
            return (section, item, value, source)
        except KeyError:
            return (section, item)

    def source(self, section, item):
        return self._source.get((section, item), b"")

    def sections(self):
        return sorted(self._data.keys())

    def items(self, section):
        return list(pycompat.iteritems(self._data.get(section, {})))

    def set(self, section, item, value, source=b""):
        if pycompat.ispy3:
            assert not isinstance(
                section, str
            ), b'config section may not be unicode strings on Python 3'
            assert not isinstance(
                item, str
            ), b'config item may not be unicode strings on Python 3'
            assert not isinstance(
                value, str
            ), b'config values may not be unicode strings on Python 3'
        if section not in self:
            self._data[section] = util.cowsortdict()
        else:
            self._data[section] = self._data[section].preparewrite()
        self._data[section][item] = value
        if source:
            self._source = self._source.preparewrite()
            self._source[(section, item)] = source

    def restore(self, data):
        """restore data returned by self.backup"""
        self._source = self._source.preparewrite()
        if len(data) == 4:
            # restore old data
            section, item, value, source = data
            self._data[section] = self._data[section].preparewrite()
            self._data[section][item] = value
            self._source[(section, item)] = source
        else:
            # no data before, remove everything
            section, item = data
            if section in self._data:
                self._data[section].pop(item, None)
            self._source.pop((section, item), None)

    def parse(self, src, data, sections=None, remap=None, include=None):
        sectionre = util.re.compile(br'\[([^\[]+)\]')
        itemre = util.re.compile(br'([^=\s][^=]*?)\s*=\s*(.*\S|)')
        contre = util.re.compile(br'\s+(\S|\S.*\S)\s*$')
        emptyre = util.re.compile(br'(;|#|\s*$)')
        commentre = util.re.compile(br'(;|#)')
        unsetre = util.re.compile(br'%unset\s+(\S+)')
        includere = util.re.compile(br'%include\s+(\S|\S.*\S)\s*$')
        section = b""
        item = None
        line = 0
        cont = False

        if remap:
            section = remap.get(section, section)

        for l in data.splitlines(True):
            line += 1
            if line == 1 and l.startswith(b'\xef\xbb\xbf'):
                # Someone set us up the BOM
                l = l[3:]
            if cont:
                if commentre.match(l):
                    continue
                m = contre.match(l)
                if m:
                    if sections and section not in sections:
                        continue
                    v = self.get(section, item) + b"\n" + m.group(1)
                    self.set(section, item, v, b"%s:%d" % (src, line))
                    continue
                item = None
                cont = False
            m = includere.match(l)

            if m and include:
                expanded = util.expandpath(m.group(1))
                includepaths = [os.path.dirname(src)] + self._includepaths

                for base in includepaths:
                    inc = os.path.normpath(os.path.join(base, expanded))

                    try:
                        include(inc, remap=remap, sections=sections)
                        break
                    except IOError as inst:
                        if inst.errno != errno.ENOENT:
                            raise error.ParseError(
                                _(b"cannot include %s (%s)")
                                % (inc, encoding.strtolocal(inst.strerror)),
                                b"%s:%d" % (src, line),
                            )
                continue
            if emptyre.match(l):
                continue
            m = sectionre.match(l)
            if m:
                section = m.group(1)
                if remap:
                    section = remap.get(section, section)
                if section not in self:
                    self._data[section] = util.cowsortdict()
                continue
            m = itemre.match(l)
            if m:
                item = m.group(1)
                cont = True
                if sections and section not in sections:
                    continue
                self.set(section, item, m.group(2), b"%s:%d" % (src, line))
                continue
            m = unsetre.match(l)
            if m:
                name = m.group(1)
                if sections and section not in sections:
                    continue
                if self.get(section, name) is not None:
                    self._data[section] = self._data[section].preparewrite()
                    del self._data[section][name]
                self._unset.append((section, name))
                continue

            raise error.ParseError(l.rstrip(), (b"%s:%d" % (src, line)))

    def read(self, path, fp=None, sections=None, remap=None):
        if not fp:
            fp = util.posixfile(path, b'rb')
-        assert (
-            getattr(fp, 'mode', r'rb') == r'rb'
-        ), b'config files must be opened in binary mode, got fp=%r mode=%r' % (
-            fp,
-            fp.mode,
-        )
+        assert getattr(fp, 'mode', r'rb') == r'rb', (
+            b'config files must be opened in binary mode, got fp=%r mode=%r'
+            % (fp, fp.mode,)
+        )
        self.parse(
            path, fp.read(), sections=sections, remap=remap, include=self.read
        )


def parselist(value):
    """parse a configuration value as a list of comma/space separated strings

    >>> parselist(b'this,is "a small" ,test')
    ['this', 'is', 'a small', 'test']
    """

    def _parse_plain(parts, s, offset):
        whitespace = False
        while offset < len(s) and (
            s[offset : offset + 1].isspace() or s[offset : offset + 1] == b','
        ):
            whitespace = True
            offset += 1
        if offset >= len(s):
            return None, parts, offset
        if whitespace:
            parts.append(b'')
        if s[offset : offset + 1] == b'"' and not parts[-1]:
            return _parse_quote, parts, offset + 1
        elif s[offset : offset + 1] == b'"' and parts[-1][-1:] == b'\\':
            parts[-1] = parts[-1][:-1] + s[offset : offset + 1]
            return _parse_plain, parts, offset + 1
        parts[-1] += s[offset : offset + 1]
        return _parse_plain, parts, offset + 1

    def _parse_quote(parts, s, offset):
        if offset < len(s) and s[offset : offset + 1] == b'"':  # ""
            parts.append(b'')
            offset += 1
            while offset < len(s) and (
                s[offset : offset + 1].isspace()
                or s[offset : offset + 1] == b','
            ):
                offset += 1
            return _parse_plain, parts, offset

        while offset < len(s) and s[offset : offset + 1] != b'"':
            if (
                s[offset : offset + 1] == b'\\'
                and offset + 1 < len(s)
                and s[offset + 1 : offset + 2] == b'"'
            ):
                offset += 1
                parts[-1] += b'"'
            else:
                parts[-1] += s[offset : offset + 1]
            offset += 1

        if offset >= len(s):
            real_parts = _configlist(parts[-1])
            if not real_parts:
                parts[-1] = b'"'
            else:
                real_parts[0] = b'"' + real_parts[0]
                parts = parts[:-1]
                parts.extend(real_parts)
            return None, parts, offset

        offset += 1
        while offset < len(s) and s[offset : offset + 1] in [b' ', b',']:
            offset += 1

        if offset < len(s):
            if offset + 1 == len(s) and s[offset : offset + 1] == b'"':
                parts[-1] += b'"'
                offset += 1
            else:
                parts.append(b'')
        else:
            return None, parts, offset

        return _parse_plain, parts, offset

    def _configlist(s):
        s = s.rstrip(b' ,')
        if not s:
            return []
        parser, parts, offset = _parse_plain, [b''], 0
        while parser:
            parser, parts, offset = parser(parts, s, offset)
        return parts

    if value is not None and isinstance(value, bytes):
        result = _configlist(value.lstrip(b' ,\n'))
    else:
        result = value
    return result or []
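
To make the parser above concrete, here is a small usage sketch (an illustration, not part of the commit; the sample input exercises the sectionre, itemre, contre, commentre, and unsetre branches of parse()):

    from mercurial import config

    # hypothetical sample input, built to hit each parsing branch
    sample = (
        b"[ui]\n"
        b"username = Jane Doe <jane@example.com>\n"
        b"  continued onto a second line\n"  # contre: appended to the item
        b"; comments are skipped\n"          # emptyre/commentre
        b"%unset username\n"                 # unsetre: removes the item
    )
    cfg = config.config()
    cfg.parse(b'sample.rc', sample)
    assert cfg.get(b'ui', b'username') is None  # %unset removed it
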
mercurial/context.py
@@ -1,2987 +1,2986 @@
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import filecmp
import os
import stat

from .i18n import _
from .node import (
    addednodeid,
    hex,
    modifiednodeid,
    nullid,
    nullrev,
    short,
    wdirfilenodeids,
    wdirhex,
)
from .pycompat import (
    getattr,
    open,
)
from . import (
    copies,
    dagop,
    encoding,
    error,
    fileset,
    match as matchmod,
    obsolete as obsmod,
    patch,
    pathutil,
    phases,
    pycompat,
    repoview,
    scmutil,
    sparse,
    subrepo,
    subrepoutil,
    util,
)
from .utils import (
    dateutil,
    stringutil,
)

propertycache = util.propertycache


class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""

    def __init__(self, repo):
        self._repo = repo

    def __bytes__(self):
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(
        self, other, s, match, listignored, listclean, listunknown
    ):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in pycompat.iteritems(d):
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirfilenodeids:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [
                fn
                for fn in unknown
                if fn not in mf1 and (not match or match(fn))
            ]
            ignored = [
                fn
                for fn in ignored
                if fn not in mf1 and (not match or match(fn))
            ]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(
            modified, added, removed, deleted, unknown, ignored, clean
        )

    @propertycache
    def substate(self):
        return subrepoutil.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev

    def node(self):
        return self._node

    def hex(self):
        return hex(self.node())

    def manifest(self):
        return self._manifest

    def manifestctx(self):
        return self._manifestctx

    def repo(self):
        return self._repo

    def phasestr(self):
        return phases.phasenames[self.phase()]

    def mutable(self):
        return self.phase() > phases.public

    def matchfileset(self, expr, badfn=None):
        return fileset.match(self, expr, badfn=badfn)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, b'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, b'extinct')

    def orphan(self):
        """True if the changeset is not obsolete, but its ancestor is"""
        return self.rev() in obsmod.getrevs(self._repo, b'orphan')

    def phasedivergent(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be phase-divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, b'phasedivergent')

    def contentdivergent(self):
        """Is a successor of a changeset with multiple possible successor sets

        Only non-public and non-obsolete changesets may be content-divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, b'contentdivergent')

    def isunstable(self):
        """True if the changeset is either orphan, phase-divergent or
        content-divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append(b'orphan')
        if self.phasedivergent():
            instabilities.append(b'phase-divergent')
        if self.contentdivergent():
            instabilities.append(b'content-divergent')
        return instabilities

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return self._repo[nullrev]

    def _fileinfo(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(
                    self._node, path, _(b'not found in manifest')
                )
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (
                    self._manifestdelta[path],
                    self._manifestdelta.flags(path),
                )
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(
                self._node, path, _(b'not found in manifest')
            )

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return b''

    @propertycache
    def _copies(self):
        return copies.computechangesetcopies(self)

    def p1copies(self):
        return self._copies[0]

    def p2copies(self):
        return self._copies[1]

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(
        self,
        pats=None,
        include=None,
        exclude=None,
        default=b'glob',
        listsubrepos=False,
        badfn=None,
    ):
        r = self._repo
        return matchmod.match(
            r.root,
            r.getcwd(),
            pats,
            include,
            exclude,
            default,
            auditor=r.nofsauditor,
            ctx=self,
            listsubrepos=listsubrepos,
            badfn=badfn,
        )

    def diff(
        self,
        ctx2=None,
        match=None,
        changes=None,
        opts=None,
        losedatafn=None,
        pathfn=None,
        copy=None,
        copysourcematch=None,
        hunksfilterfn=None,
    ):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        return patch.diff(
            self._repo,
            ctx2,
            self,
            match=match,
            changes=changes,
            opts=opts,
            losedatafn=losedatafn,
            pathfn=pathfn,
            copy=copy,
            copysourcematch=copysourcematch,
            hunksfilterfn=hunksfilterfn,
        )

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(
        self,
        other=None,
        match=None,
        listignored=False,
        listclean=False,
        listunknown=False,
        listsubrepos=False,
    ):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if not isinstance(ctx1, changectx) and isinstance(ctx2, changectx):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = self._repo.narrowmatch(match)
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(
            ctx1, r, match, listignored, listclean, listunknown
424 ctx1, r, match, listignored, listclean, listunknown
425 )
425 )
426
426
427 if reversed:
427 if reversed:
428 # Reverse added and removed. Clear deleted, unknown and ignored as
428 # Reverse added and removed. Clear deleted, unknown and ignored as
429 # these make no sense to reverse.
429 # these make no sense to reverse.
430 r = scmutil.status(
430 r = scmutil.status(
431 r.modified, r.removed, r.added, [], [], [], r.clean
431 r.modified, r.removed, r.added, [], [], [], r.clean
432 )
432 )
433
433
434 if listsubrepos:
434 if listsubrepos:
435 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
435 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
436 try:
436 try:
437 rev2 = ctx2.subrev(subpath)
437 rev2 = ctx2.subrev(subpath)
438 except KeyError:
438 except KeyError:
439 # A subrepo that existed in node1 was deleted between
439 # A subrepo that existed in node1 was deleted between
440 # node1 and node2 (inclusive). Thus, ctx2's substate
440 # node1 and node2 (inclusive). Thus, ctx2's substate
441 # won't contain that subpath. The best we can do ignore it.
441 # won't contain that subpath. The best we can do ignore it.
442 rev2 = None
442 rev2 = None
443 submatch = matchmod.subdirmatcher(subpath, match)
443 submatch = matchmod.subdirmatcher(subpath, match)
444 s = sub.status(
444 s = sub.status(
445 rev2,
445 rev2,
446 match=submatch,
446 match=submatch,
447 ignored=listignored,
447 ignored=listignored,
448 clean=listclean,
448 clean=listclean,
449 unknown=listunknown,
449 unknown=listunknown,
450 listsubrepos=True,
450 listsubrepos=True,
451 )
451 )
452 for rfiles, sfiles in zip(r, s):
452 for rfiles, sfiles in zip(r, s):
453 rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
453 rfiles.extend(b"%s/%s" % (subpath, f) for f in sfiles)
454
454
455 for l in r:
455 for l in r:
456 l.sort()
456 l.sort()
457
457
458 return r
458 return r
459
459
460
460
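
# Illustrative usage sketch (not part of this module): how the status() API
# above is typically consumed. Assumes the current directory is a Mercurial
# repository; the result is a scmutil.status tuple with the fields listed in
# the docstring.
#
#   from mercurial import hg, ui as uimod
#
#   repo = hg.repository(uimod.ui.load(), b'.')
#   st = repo[b'.'].status()          # compare '.' against working directory
#   print(st.modified, st.added, st.removed)

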
class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""

    def __init__(self, repo, rev, node):
        super(changectx, self).__init__(repo)
        self._rev = rev
        self._node = node

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __nonzero__(self):
        return self._rev != nullrev

    __bool__ = __nonzero__

    @propertycache
    def _changeset(self):
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        return self._manifestctx.readdelta()

    @propertycache
    def _parents(self):
        repo = self._repo
        p1, p2 = repo.changelog.parentrevs(self._rev)
        if p2 == nullrev:
            return [repo[p1]]
        return [repo[p1], repo[p2]]

    def changeset(self):
        c = self._changeset
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra,
        )

    def manifestnode(self):
        return self._changeset.manifest

    def user(self):
        return self._changeset.user

    def date(self):
        return self._changeset.date

    def files(self):
        return self._changeset.files

    def filesmodified(self):
        modified = set(self.files())
        modified.difference_update(self.filesadded())
        modified.difference_update(self.filesremoved())
        return sorted(modified)

    def filesadded(self):
        filesadded = self._changeset.filesadded
        compute_on_none = True
        if self._repo.filecopiesmode == b'changeset-sidedata':
            compute_on_none = False
        else:
            source = self._repo.ui.config(b'experimental', b'copies.read-from')
            if source == b'changeset-only':
                compute_on_none = False
            elif source != b'compatibility':
                # filelog mode, ignore any changelog content
                filesadded = None
        if filesadded is None:
            if compute_on_none:
                filesadded = copies.computechangesetfilesadded(self)
            else:
                filesadded = []
        return filesadded

    def filesremoved(self):
        filesremoved = self._changeset.filesremoved
        compute_on_none = True
        if self._repo.filecopiesmode == b'changeset-sidedata':
            compute_on_none = False
        else:
            source = self._repo.ui.config(b'experimental', b'copies.read-from')
            if source == b'changeset-only':
                compute_on_none = False
            elif source != b'compatibility':
                # filelog mode, ignore any changelog content
                filesremoved = None
        if filesremoved is None:
            if compute_on_none:
                filesremoved = copies.computechangesetfilesremoved(self)
            else:
                filesremoved = []
        return filesremoved
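
    # Hedged aside (not part of this module): filesadded()/filesremoved()
    # above honor the experimental config knob, e.g. in an hgrc:
    #
    #   [experimental]
    #   copies.read-from = compatibility
    #
    # 'changeset-only' trusts changeset metadata alone, 'compatibility' falls
    # back to computing the lists when the changeset recorded nothing, and the
    # filelog default ignores changeset data entirely.
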
    @propertycache
    def _copies(self):
        p1copies = self._changeset.p1copies
        p2copies = self._changeset.p2copies
        compute_on_none = True
        if self._repo.filecopiesmode == b'changeset-sidedata':
            compute_on_none = False
        else:
            source = self._repo.ui.config(b'experimental', b'copies.read-from')
            # If config says to get copy metadata only from changeset, then
            # return that, defaulting to {} if there was no copy metadata. In
            # compatibility mode, we return copy data from the changeset if it
            # was recorded there, and otherwise we fall back to getting it
            # from the filelogs (below).
            #
            # If we are in compatibility mode and there is no data in the
            # changeset, we get the copy metadata from the filelogs.
            #
            # Otherwise, when config said to read only from the filelog, we
            # get the copy metadata from the filelogs.
            if source == b'changeset-only':
                compute_on_none = False
            elif source != b'compatibility':
                # filelog mode, ignore any changelog content
                p1copies = p2copies = None
        if p1copies is None:
            if compute_on_none:
                p1copies, p2copies = super(changectx, self)._copies
            else:
                if p1copies is None:
                    p1copies = {}
                if p2copies is None:
                    p2copies = {}
        return p1copies, p2copies

    def description(self):
        return self._changeset.description

    def branch(self):
        return encoding.tolocal(self._changeset.extra.get(b"branch"))

    def closesbranch(self):
        return b'close' in self._changeset.extra

    def extra(self):
        """Return a dict of extra information."""
        return self._changeset.extra

    def tags(self):
        """Return a list of byte tag names"""
        return self._repo.nodetags(self._node)

    def bookmarks(self):
        """Return a list of byte bookmark names."""
        return self._repo.nodebookmarks(self._node)

    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)

    def hidden(self):
        return self._rev in repoview.filterrevs(self._repo, b'visible')

    def isinmemory(self):
        return False

    def children(self):
        """return list of changectx contexts for each child changeset.

        This returns only the immediate child changesets. Use descendants() to
        recursively walk children.
        """
        c = self._repo.changelog.children(self._node)
        return [self._repo[x] for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors([self._rev]):
            yield self._repo[a]

    def descendants(self):
        """Recursively yield all children of the changeset.

        For just the immediate children, use children()
        """
        for d in self._repo.changelog.descendants([self._rev]):
            yield self._repo[d]

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(
            self._repo, path, fileid=fileid, changectx=self, filelog=filelog
        )

    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist(b'merge', b'preferancestor'):
                try:
                    ctx = scmutil.revsymbol(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (
                        _(b"note: using %s as ancestor of %s and %s\n")
                        % (short(anc), short(self._node), short(n2))
                    )
                    + b''.join(
                        _(
                            b" alternatively, use --config "
                            b"merge.preferancestor=%s\n"
                        )
                        % short(n)
                        for n in sorted(cahs)
                        if n != anc
                    )
                )
        return self._repo[anc]
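
    # Illustrative sketch (not part of this module): resolving a merge base.
    # Assumes `repo` is an open localrepository; b'rev1'/b'rev2' are
    # hypothetical revision identifiers.
    #
    #   anc = repo[b'rev1'].ancestor(repo[b'rev2'], warn=True)
    #
    # When several greatest common ancestors exist, the note printed by
    # warn=True suggests pinning one via --config merge.preferancestor=<node>.
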
    def isancestorof(self, other):
        """True if this changeset is an ancestor of other"""
        return self._repo.changelog.isancestorrev(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain
            # about paths into valid subrepos.
            if any(fn == s or fn.startswith(s + b'/') for s in self.substate):
                return
            match.bad(fn, _(b'no such file in rev %s') % self)

        m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
        return self._manifest.walk(m)

    def matches(self, match):
        return self.walk(match)


class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory,
    """

    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return b"%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            return b"%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (
                type(self) == type(other)
                and self._path == other._path
                and self._filenode == other._filenode
            )
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev

    def filenode(self):
        return self._filenode

    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)

    def flags(self):
        return self._flags

    def filelog(self):
        return self._filelog

    def rev(self):
        return self._changeid

    def linkrev(self):
        return self._filelog.linkrev(self._filerev)

    def node(self):
        return self._changectx.node()

    def hex(self):
        return self._changectx.hex()

    def user(self):
        return self._changectx.user()

    def date(self):
        return self._changectx.date()

    def files(self):
        return self._changectx.files()

    def description(self):
        return self._changectx.description()

    def branch(self):
        return self._changectx.branch()

    def extra(self):
        return self._changectx.extra()

    def phase(self):
        return self._changectx.phase()

    def phasestr(self):
        return self._changectx.phasestr()

    def obsolete(self):
        return self._changectx.obsolete()

    def instabilities(self):
        return self._changectx.instabilities()

    def manifest(self):
        return self._changectx.manifest()

    def changectx(self):
        return self._changectx

    def renamed(self):
        return self._copied

    def copysource(self):
        return self._copied and self._copied[0]

    def repo(self):
        return self._repo

    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return stringutil.binary(self.data())
        except IOError:
            return False

    def isexec(self):
        return b'x' in self.flags()

    def islink(self):
        return b'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This
        is expected to be True for all subclasses of basectx."""
        return False

    _customcmp = False

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if self._filenode is None:
            raise error.ProgrammingError(
                b'filectx.cmp() must be reimplemented if not backed by revlog'
            )

        if fctx._filenode is None:
            if self._repo._encodefilterpats:
                # can't rely on size() because wdir content may be decoded
                return self._filelog.cmp(self._filenode, fctx.data())
            if self.size() - 4 == fctx.size():
                # size() can match:
                # if file data starts with '\1\n', empty metadata block is
                # prepended, which adds 4 bytes to filelog.size().
                return self._filelog.cmp(self._filenode, fctx.data())
            if self.size() == fctx.size():
                # size() matches: need to compare content
                return self._filelog.cmp(self._filenode, fctx.data())

        # size() differs
        return True
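
    # Hedged aside (not part of this module): the 4-byte special case in
    # cmp() comes from filelog's metadata framing. Data that itself starts
    # with b'\x01\n' is stored with an empty metadata block prepended, so the
    # stored size exceeds the logical size by exactly 4 bytes:
    #
    #   stored = b'\x01\n' + b'\x01\n' + data   # size() == len(data) + 4
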
    def _adjustlinkrev(self, srcrev, inclusive=False, stoprev=None):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        :stoprev: an optional revision to stop the walk at. If no introduction
                  of this file content could be found before this floor
                  revision, the function will return None and stop its
                  iteration.
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        if srcrev == lkr:
            return lkr
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True  # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                if stoprev is not None and a < stoprev:
                    return None
                ac = cl.read(a)  # get changeset data (we avoid object creation)
                if path in ac[3]:  # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if the manifest uses a buggy file revision (not a child of
            # the one it replaces) we could. Such a buggy situation will
            # likely result in a crash somewhere else at some point.
        return lkr

    def isintroducedafter(self, changelogrev):
        """True if a filectx has been introduced after a given floor revision
        """
        if self.linkrev() >= changelogrev:
            return True
        introrev = self._introrev(stoprev=changelogrev)
        if introrev is None:
            return False
        return introrev >= changelogrev

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it takes into account
        the changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        return self._introrev()
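
    # Illustrative sketch (not part of this module), assuming `fctx` is a
    # filectx obtained from a changeset:
    #
    #   fctx.linkrev()   # first changeset that recorded this file node,
    #                    # possibly outside fctx's own ancestry
    #   fctx.introrev()  # corrected to an ancestor of fctx's changeset,
    #                    # avoiding the 'linkrev-shadowing' problem above
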
    def _introrev(self, stoprev=None):
        """
        Same as `introrev`, but with an extra argument to limit the changelog
        iteration range in some internal use cases.

        If `stoprev` is set, the `introrev` will not be searched past that
        `stoprev` revision and "None" might be returned. This is useful to
        limit the iteration range.
        """
        toprev = None
        attrs = vars(self)
        if r'_changeid' in attrs:
            # We have a cached value already
            toprev = self._changeid
        elif r'_changectx' in attrs:
            # We know which changelog entry we are coming from
            toprev = self._changectx.rev()

        if toprev is not None:
            return self._adjustlinkrev(toprev, inclusive=True, stoprev=stoprev)
        elif r'_descendantrev' in attrs:
            introrev = self._adjustlinkrev(self._descendantrev, stoprev=stoprev)
            # be nice and cache the result of the computation
            if introrev is not None:
                self._changeid = introrev
            return introrev
        else:
            return self.linkrev()

    def introfilectx(self):
        """Return filectx having identical contents, but pointing to the
        changeset revision where this filectx was introduced"""
        introrev = self.introrev()
        if self.rev() == introrev:
            return self
        return self.filectx(self.filenode(), changeid=introrev)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if r'_changeid' in vars(self) or r'_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif r'_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parents are nullid and pl is
            #   empty.
            # - In case of merge, only one of the parents is nullid and it
            #   should be replaced with the rename information. This parent
            #   is -always- the first one.
            #
            # As nullid parents have already been filtered out in the list
            # comprehension above, inserting at index 0 always replaces the
            # first nullid parent with the rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, skiprevs=None, diffopts=None):
        """Returns a list of annotateline objects for each line in the file

        - line.fctx is the filectx of the node where that line was last
          changed
        - line.lineno is the line number at the first appearance in the
          managed file
        - line.text is the data on that line (including newline character)
        """
        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not r'_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self.introfilectx()
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if base.rev() is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors(
                    [p.rev() for p in base.parents()], inclusive=True
                )
            else:
                ac = cl.ancestors([base.rev()], inclusive=True)
            base._ancestrycontext = ac

        return dagop.annotate(
            base, parents, skiprevs=skiprevs, diffopts=diffopts
        )
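
    # Illustrative usage sketch (not part of this module): annotating a file.
    # Assumes `repo` is an open localrepository and b'somefile' is a
    # hypothetical tracked path; the line attributes match the docstring
    # above.
    #
    #   for line in repo[b'tip'][b'somefile'].annotate(follow=True):
    #       print(line.lineno, line.text)
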
1146 def ancestors(self, followfirst=False):
1146 def ancestors(self, followfirst=False):
1147 visit = {}
1147 visit = {}
1148 c = self
1148 c = self
1149 if followfirst:
1149 if followfirst:
1150 cut = 1
1150 cut = 1
1151 else:
1151 else:
1152 cut = None
1152 cut = None
1153
1153
1154 while True:
1154 while True:
1155 for parent in c.parents()[:cut]:
1155 for parent in c.parents()[:cut]:
1156 visit[(parent.linkrev(), parent.filenode())] = parent
1156 visit[(parent.linkrev(), parent.filenode())] = parent
1157 if not visit:
1157 if not visit:
1158 break
1158 break
1159 c = visit.pop(max(visit))
1159 c = visit.pop(max(visit))
1160 yield c
1160 yield c
1161
1161
1162 def decodeddata(self):
1162 def decodeddata(self):
1163 """Returns `data()` after running repository decoding filters.
1163 """Returns `data()` after running repository decoding filters.
1164
1164
1165 This is often equivalent to how the data would be expressed on disk.
1165 This is often equivalent to how the data would be expressed on disk.
1166 """
1166 """
1167 return self._repo.wwritedata(self.path(), self.data())
1167 return self._repo.wwritedata(self.path(), self.data())
1168
1168
1169
1169
1170 class filectx(basefilectx):
1170 class filectx(basefilectx):
1171 """A filecontext object makes access to data related to a particular
1171 """A filecontext object makes access to data related to a particular
1172 filerevision convenient."""
1172 filerevision convenient."""
1173
1173
1174 def __init__(
1174 def __init__(
1175 self,
1175 self,
1176 repo,
1176 repo,
1177 path,
1177 path,
1178 changeid=None,
1178 changeid=None,
1179 fileid=None,
1179 fileid=None,
1180 filelog=None,
1180 filelog=None,
1181 changectx=None,
1181 changectx=None,
1182 ):
1182 ):
1183 """changeid must be a revision number, if specified.
1183 """changeid must be a revision number, if specified.
1184 fileid can be a file revision or node."""
1184 fileid can be a file revision or node."""
1185 self._repo = repo
1185 self._repo = repo
1186 self._path = path
1186 self._path = path
1187
1187
1188 assert (
1188 assert (
1189 changeid is not None or fileid is not None or changectx is not None
1189 changeid is not None or fileid is not None or changectx is not None
1190 ), b"bad args: changeid=%r, fileid=%r, changectx=%r" % (
1190 ), (
1191 changeid,
1191 b"bad args: changeid=%r, fileid=%r, changectx=%r"
1192 fileid,
1192 % (changeid, fileid, changectx,)
1193 changectx,
1194 )
1193 )
1195
1194
1196 if filelog is not None:
1195 if filelog is not None:
1197 self._filelog = filelog
1196 self._filelog = filelog
1198
1197
1199 if changeid is not None:
1198 if changeid is not None:
1200 self._changeid = changeid
1199 self._changeid = changeid
1201 if changectx is not None:
1200 if changectx is not None:
1202 self._changectx = changectx
1201 self._changectx = changectx
1203 if fileid is not None:
1202 if fileid is not None:
1204 self._fileid = fileid
1203 self._fileid = fileid
1205
1204
1206 @propertycache
1205 @propertycache
1207 def _changectx(self):
1206 def _changectx(self):
1208 try:
1207 try:
1209 return self._repo[self._changeid]
1208 return self._repo[self._changeid]
1210 except error.FilteredRepoLookupError:
1209 except error.FilteredRepoLookupError:
1211 # Linkrev may point to any revision in the repository. When the
1210 # Linkrev may point to any revision in the repository. When the
1212 # repository is filtered this may lead to `filectx` trying to build
1211 # repository is filtered this may lead to `filectx` trying to build
1213 # `changectx` for filtered revision. In such case we fallback to
1212 # `changectx` for filtered revision. In such case we fallback to
1214 # creating `changectx` on the unfiltered version of the reposition.
1213 # creating `changectx` on the unfiltered version of the reposition.
1215 # This fallback should not be an issue because `changectx` from
1214 # This fallback should not be an issue because `changectx` from
1216 # `filectx` are not used in complex operations that care about
1215 # `filectx` are not used in complex operations that care about
1217 # filtering.
1216 # filtering.
1218 #
1217 #
1219 # This fallback is a cheap and dirty fix that prevent several
1218 # This fallback is a cheap and dirty fix that prevent several
1220 # crashes. It does not ensure the behavior is correct. However the
1219 # crashes. It does not ensure the behavior is correct. However the
1221 # behavior was not correct before filtering either and "incorrect
1220 # behavior was not correct before filtering either and "incorrect
1222 # behavior" is seen as better as "crash"
1221 # behavior" is seen as better as "crash"
1223 #
1222 #
1224 # Linkrevs have several serious troubles with filtering that are
1223 # Linkrevs have several serious troubles with filtering that are
1225 # complicated to solve. Proper handling of the issue here should be
1224 # complicated to solve. Proper handling of the issue here should be
1226 # considered when solving linkrev issue are on the table.
1225 # considered when solving linkrev issue are on the table.
1227 return self._repo.unfiltered()[self._changeid]
1226 return self._repo.unfiltered()[self._changeid]
1228
1227
1229 def filectx(self, fileid, changeid=None):
1228 def filectx(self, fileid, changeid=None):
1230 '''opens an arbitrary revision of the file without
1229 '''opens an arbitrary revision of the file without
1231 opening a new filelog'''
1230 opening a new filelog'''
1232 return filectx(
1231 return filectx(
1233 self._repo,
1232 self._repo,
1234 self._path,
1233 self._path,
1235 fileid=fileid,
1234 fileid=fileid,
1236 filelog=self._filelog,
1235 filelog=self._filelog,
1237 changeid=changeid,
1236 changeid=changeid,
1238 )
1237 )
1239
1238
1240 def rawdata(self):
1239 def rawdata(self):
1241 return self._filelog.rawdata(self._filenode)
1240 return self._filelog.rawdata(self._filenode)
1242
1241
1243 def rawflags(self):
1242 def rawflags(self):
1244 """low-level revlog flags"""
1243 """low-level revlog flags"""
1245 return self._filelog.flags(self._filerev)
1244 return self._filelog.flags(self._filerev)
1246
1245
1247 def data(self):
1246 def data(self):
1248 try:
1247 try:
1249 return self._filelog.read(self._filenode)
1248 return self._filelog.read(self._filenode)
1250 except error.CensoredNodeError:
1249 except error.CensoredNodeError:
1251 if self._repo.ui.config(b"censor", b"policy") == b"ignore":
1250 if self._repo.ui.config(b"censor", b"policy") == b"ignore":
1252 return b""
1251 return b""
1253 raise error.Abort(
1252 raise error.Abort(
1254 _(b"censored node: %s") % short(self._filenode),
1253 _(b"censored node: %s") % short(self._filenode),
1255 hint=_(b"set censor.policy to ignore errors"),
1254 hint=_(b"set censor.policy to ignore errors"),
1256 )
1255 )
1257
1256
1258 def size(self):
1257 def size(self):
1259 return self._filelog.size(self._filerev)
1258 return self._filelog.size(self._filerev)
1260
1259
1261 @propertycache
1260 @propertycache
1262 def _copied(self):
1261 def _copied(self):
1263 """check if file was actually renamed in this changeset revision
1262 """check if file was actually renamed in this changeset revision
1264
1263
1265 If rename logged in file revision, we report copy for changeset only
1264 If rename logged in file revision, we report copy for changeset only
1266 if file revisions linkrev points back to the changeset in question
1265 if file revisions linkrev points back to the changeset in question
1267 or both changeset parents contain different file revisions.
1266 or both changeset parents contain different file revisions.
1268 """
1267 """
1269
1268
1270 renamed = self._filelog.renamed(self._filenode)
1269 renamed = self._filelog.renamed(self._filenode)
1271 if not renamed:
1270 if not renamed:
1272 return None
1271 return None
1273
1272
1274 if self.rev() == self.linkrev():
1273 if self.rev() == self.linkrev():
1275 return renamed
1274 return renamed
1276
1275
1277 name = self.path()
1276 name = self.path()
1278 fnode = self._filenode
1277 fnode = self._filenode
1279 for p in self._changectx.parents():
1278 for p in self._changectx.parents():
1280 try:
1279 try:
1281 if fnode == p.filenode(name):
1280 if fnode == p.filenode(name):
1282 return None
1281 return None
1283 except error.LookupError:
1282 except error.LookupError:
1284 pass
1283 pass
1285 return renamed
1284 return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [
            filectx(self._repo, self._path, fileid=x, filelog=self._filelog)
            for x in c
        ]


class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""

    def __init__(
        self,
        repo,
        text=b"",
        user=None,
        date=None,
        extra=None,
        changes=None,
        branch=None,
    ):
        super(committablectx, self).__init__(repo)
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if branch is not None:
            self._extra[b'branch'] = encoding.fromlocal(branch)
        if not self._extra.get(b'branch'):
            self._extra[b'branch'] = b'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + b"+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        date = ui.configdate(b'devel', b'default-date')
        if date is None:
            date = dateutil.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None

    def user(self):
        return self._user or self._repo.ui.username()

    def date(self):
        return self._date

    def description(self):
        return self._text

    def files(self):
        return sorted(
            self._status.modified + self._status.added + self._status.removed
        )

    def modified(self):
        return self._status.modified

    def added(self):
        return self._status.added

    def removed(self):
        return self._status.removed

    def deleted(self):
        return self._status.deleted

    filesmodified = modified
    filesadded = added
    filesremoved = removed

    def branch(self):
        return encoding.tolocal(self._extra[b'branch'])

    def closesbranch(self):
        return b'close' in self._extra

    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft  # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase
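
    # The phase computed above is just the maximum over the parent phases,
    # with draft as the floor (a hedged summary, not original code):
    #
    #   public parent + public parent -> draft  (the floor wins)
    #   secret parent + draft parent  -> secret (the max wins)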

    def hidden(self):
        return False

    def children(self):
        return []

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2)  # punt on two parents for now

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]
        ):
            yield self._repo[a]

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

    def dirty(self, missing=False, merge=True, branch=True):
        return False


class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
              or None to use the repository status.
    """

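    # A minimal usage sketch (hedged; assumes an already-open `repo`, and
    # note that repo[None] is the conventional way to obtain a workingctx
    # rather than direct construction):
    #
    #   wctx = repo[None]
    #   wctx.files()   # sorted modified + added + removed
    #   wctx.branch()  # branch the next commit will land on
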
    def __init__(
        self, repo, text=b"", user=None, date=None, extra=None, changes=None
    ):
        branch = None
        if not extra or b'branch' not in extra:
            try:
                branch = repo.dirstate.branch()
            except UnicodeDecodeError:
                raise error.Abort(_(b'branch name not in UTF-8!'))
        super(workingctx, self).__init__(
            repo, text, user, date, extra, changes, branch=branch
        )

    def __iter__(self):
        d = self._repo.dirstate
        for f in d:
            if d[f] != b'r':
                yield f

    def __contains__(self, key):
        return self._repo.dirstate[key] not in b"?r"

    def hex(self):
        return wdirhex

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        # use unfiltered repo to delay/avoid loading obsmarkers
        unfi = self._repo.unfiltered()
        return [changectx(self._repo, unfi.changelog.rev(n), n) for n in p]

    def _fileinfo(self, path):
        # populate __dict__['_manifest'] as workingctx has no _manifestdelta
        self._manifest
        return super(workingctx, self)._fileinfo(path)

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()

            def func(f):
                f = copiesget(f, f)
                return man.flags(f)

        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f)  # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return b''  # punt for conflicts

        return func
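
    # The three-way flag reconciliation above follows the usual merge rule:
    # keep whichever side changed relative to the ancestor. As a summary
    # table (editorial sketch, not original code):
    #
    #   fl1 == fl2       -> fl1  (both sides agree)
    #   fl1 == fla       -> fl2  (only p2 changed the flag)
    #   fl2 == fla       -> fl1  (only p1 changed the flag)
    #   all three differ -> b''  (conflict; punt)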

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return b''

        try:
            return self._flagfunc(path)
        except OSError:
            return b''

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(
            self._repo, path, workingctx=self, filelog=filelog
        )

    def dirty(self, missing=False, merge=True, branch=True):
        b"check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return (
            (merge and self.p2())
            or (branch and self.branch() != self.p1().branch())
            or self.modified()
            or self.added()
            or self.removed()
            or (missing and self.deleted())
        )

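    # Hedged usage sketch for dirty() above (assumes an open `repo`; not
    # part of the original module):
    #
    #   wctx = repo[None]
    #   if wctx.dirty(missing=True):
    #       raise error.Abort(b'uncommitted changes')
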
    def add(self, list, prefix=b""):
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension.  That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_(b"%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                limit = ui.configbytes(b'ui', b'large-file-limit')
                if limit != 0 and st.st_size > limit:
                    ui.warn(
                        _(
                            b"%s: up to %d MB of RAM may be required "
                            b"to manage this file\n"
                            b"(use 'hg revert %s' to cancel the "
                            b"pending addition)\n"
                        )
                        % (f, 3 * st.st_size // 1000000, uipath(f))
                    )
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(
                        _(
                            b"%s not added: only files and symlinks "
                            b"supported currently\n"
                        )
                        % uipath(f)
                    )
                    rejected.append(f)
                elif ds[f] in b'amn':
                    ui.warn(_(b"%s already tracked!\n") % uipath(f))
                elif ds[f] == b'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

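    # For reference (editorial note): the single-letter dirstate states
    # consulted by add() above and forget() below are b'n' (normal/tracked),
    # b'a' (added), b'r' (removed), b'm' (merged) and b'?' (untracked); so
    # `ds[f] in b'amn'` means "already tracked in some form", while the
    # b'r' branch resurrects a file previously marked as removed.
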
    def forget(self, files, prefix=b""):
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in ds:
                    self._repo.ui.warn(_(b"%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] != b'a':
                    ds.remove(f)
                else:
                    ds.drop(f)
            return rejected

    def copy(self, source, dest):
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(
                _(b"%s does not exist!\n") % self._repo.dirstate.pathto(dest)
            )
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(
                _(b"copy failed: %s is not a file or a symbolic link\n")
                % self._repo.dirstate.pathto(dest)
            )
        else:
            with self._repo.wlock():
                ds = self._repo.dirstate
                if ds[dest] in b'?':
                    ds.add(dest)
                elif ds[dest] in b'r':
                    ds.normallookup(dest)
                ds.copy(source, dest)

    def match(
        self,
        pats=None,
        include=None,
        exclude=None,
        default=b'glob',
        listsubrepos=False,
        badfn=None,
    ):
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(
            r.root,
            r.getcwd(),
            pats,
            include,
            exclude,
            default,
            auditor=r.auditor,
            ctx=self,
            listsubrepos=listsubrepos,
            badfn=badfn,
            icasefs=icasefs,
        )

    def _filtersuspectsymlink(self, files):
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == b'l':
                d = self[f].data()
                if (
                    d == b''
                    or len(d) >= 1024
                    or b'\n' in d
                    or stringutil.binary(d)
                ):
                    self._repo.ui.debug(
                        b'ignoring suspect symlink placeholder "%s"\n' % f
                    )
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (
                    f not in pctx
                    or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])
                ):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # Did the file become inaccessible in between? Mark it as
                # deleted, matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup
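
    # A sketch of how the triple returned above is consumed (hypothetical
    # file names, not original code):
    #
    #   modified, deleted, fixup = wctx._checklookup([b'a', b'b', b'c'])
    #   # modified: content or flags really differ from p1
    #   # deleted:  became unreadable while status was being computed
    #   # fixup:    actually clean; only their cached stat data was stale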

    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug(
                            b'skip updating dirstate: identity mismatch\n'
                        )
            except error.LockError:
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        subrepos = []
        if b'.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(
            match, subrepos, ignored=ignored, clean=clean, unknown=unknown
        )

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

        if fixup and clean:
            s.clean.extend(fixup)

        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(
                    s.modified, s.added, s.removed, s.deleted, [], [], []
                )
            else:
                self._status = s

        return s

    @propertycache
    def _copies(self):
        p1copies = {}
        p2copies = {}
        parents = self._repo.dirstate.parents()
        p1manifest = self._repo[parents[0]].manifest()
        p2manifest = self._repo[parents[1]].manifest()
        changedset = set(self.added()) | set(self.modified())
        narrowmatch = self._repo.narrowmatch()
        for dst, src in self._repo.dirstate.copies().items():
            if dst not in changedset or not narrowmatch(dst):
                continue
            if src in p1manifest:
                p1copies[dst] = src
            elif src in p2manifest:
                p2copies[dst] = src
        return p1copies, p2copies

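    # Classification sketch for the loop above (hedged summary): a dirstate
    # copy record dst <- src is attributed to whichever parent manifest
    # still contains src:
    #
    #   src in p1manifest -> p1copies[dst] = src
    #   src in p2manifest -> p2copies[dst] = src
    #   src in neither    -> the copy record is silently dropped
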
    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuses the file nodeids from the parent, but uses special node
        identifiers for added and modified files. This is used by manifests
        merge to see that files are different and by update logic to avoid
        deleting newly added files.
        """
        return self._buildstatusmanifest(self._status)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results."""
        parents = self.parents()

        man = parents[0].manifest().copy()

        ff = self._flagfunc
        for i, l in (
            (addednodeid, status.added),
            (modifiednodeid, status.modified),
        ):
            for f in l:
                man[f] = i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in status.deleted + status.removed:
            if f in man:
                del man[f]

        return man

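    # Sketch of the resulting manifest (editorial illustration): added and
    # modified files carry the sentinel nodeids `addednodeid` and
    # `modifiednodeid` instead of real hashes, so comparisons against any
    # stored manifest always see them as different:
    #
    #   man[b'new-file.txt']     == addednodeid
    #   man[b'changed-file.txt'] == modifiednodeid
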
    def _buildstatus(
        self, other, s, match, listignored, listclean, listunknown
    ):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo[b'.']:
            s = super(workingctx, self)._buildstatus(
                other, s, match, listignored, listclean, listunknown
            )
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        if other != self._repo[b'.']:

            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn(
                        b'%s: %s\n' % (self._repo.dirstate.pathto(f), msg)
                    )

            match.bad = bad
        return match

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(
            self._repo.dirstate.walk(
                self._repo.narrowmatch(match),
                subrepos=sorted(self.substate),
                unknown=True,
                ignored=False,
            )
        )

    def matches(self, match):
        match = self._repo.narrowmatch(match)
        ds = self._repo.dirstate
        return sorted(f for f in ds.matches(match) if ds[f] != b'r')

    def markcommitted(self, node):
        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

        sparse.aftercommit(self._repo, node)


class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""

    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def renamed(self):
        path = self.copysource()
        if not path:
            return None
        return path, self._changectx._parents[0]._manifest.get(path, nullid)

    def parents(self):
        '''return parent filectxs, following copies if necessary'''

        def filenode(ctx, path):
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        return [
            self._parentfilectx(p, fileid=n, filelog=l)
            for p, n, l in pl
            if n != nullid
        ]

    def children(self):
        return []


class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""

    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)

    def copysource(self):
        return self._repo.dirstate.copied(self._path)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size

    def lstat(self):
        return self._repo.wvfs.lstat(self._path)

    def date(self):
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def exists(self):
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        rmdir = self._repo.ui.configbool(b'experimental', b'removeemptydirs')
        self._repo.wvfs.unlinkpath(
            self._path, ignoremissing=ignoremissing, rmdir=rmdir
        )

    def write(self, data, flags, backgroundclose=False, **kwargs):
        """wraps repo.wwrite"""
        return self._repo.wwrite(
            self._path, data, flags, backgroundclose=backgroundclose, **kwargs
        )

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if self._repo.ui.configbool(
            b'experimental', b'merge.checkpathconflicts'
        ):
            # remove files under the directory as they should already be
            # warned and backed up
            if wvfs.isdir(f) and not wvfs.islink(f):
                wvfs.rmtree(f, forcibly=True)
            for p in reversed(list(util.finddirs(f))):
                if wvfs.isfileorlink(p):
                    wvfs.unlink(p)
                    break
        else:
            # don't remove files if path conflicts are not processed
            if wvfs.isdir(f) and not wvfs.islink(f):
                wvfs.removedirs(f)

    def setflags(self, l, x):
        self._repo.wvfs.setflags(self._path, l, x)


class overlayworkingctx(committablectx):
    """Wraps another mutable context with a write-back cache that can be
    converted into a commit context.

    self._cache[path] maps to a dict with keys: {
        'exists': bool?
        'date': date?
        'data': str?
        'flags': str?
        'copied': str? (path or None)
    }
    If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
    is `False`, the file was deleted.
    """
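
    # A sketch of one cache entry in the shape described above (hypothetical
    # values, not part of the original module):
    #
    #   self._cache[b'foo.txt'] = {
    #       b'exists': True,
    #       b'date': dateutil.makedate(),
    #       b'data': b'new contents\n',
    #       b'flags': b'',
    #       b'copied': None,
    #   }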

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, b'_manifest')

    def data(self, path):
        if self.isdirty(path):
            if self._cache[path][b'exists']:
                if self._cache[path][b'data'] is not None:
                    return self._cache[path][b'data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError(
                    b"No such file or directory: %s" % path
                )
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        def f(path):
            return self._cache[path][b'flags']

        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        return [
            f
            for f in self._cache.keys()
            if self._cache[f][b'exists'] and self._existsinparent(f)
        ]

    def added(self):
        return [
            f
            for f in self._cache.keys()
            if self._cache[f][b'exists'] and not self._existsinparent(f)
        ]

    def removed(self):
        return [
            f
            for f in self._cache.keys()
            if not self._cache[f][b'exists'] and self._existsinparent(f)
        ]

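    # The three methods above partition the dirty cache with a simple truth
    # table (editorial summary):
    #
    #   exists in cache | exists in parent | classification
    #   True            | True             | modified
    #   True            | False            | added
    #   False           | True             | removed
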
    def p1copies(self):
        copies = self._wrappedctx.p1copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None)  # delete if it exists
            source = self._cache[f][b'copied']
            if source:
                copies[f] = source
        return copies

    def p2copies(self):
        copies = self._wrappedctx.p2copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None)  # delete if it exists
            source = self._cache[f][b'copied']
            if source:
                copies[f] = source
        return copies

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path][b'date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        self._markdirty(
            path,
            exists=True,
            date=self.filedate(path),
            flags=self.flags(path),
            copied=origin,
        )

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path][b'copied']
        else:
            return None

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path][b'exists']:
                return self._cache[path][b'flags']
            else:
                raise error.ProgrammingError(
                    b"No such file or directory: %s" % path
                )
        else:
            return self._wrappedctx[path].flags()

    def __contains__(self, key):
        if key in self._cache:
            return self._cache[key][b'exists']
        return key in self.p1()

    def _existsinparent(self, path):
        try:
            # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """

        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if b'l' in self.p1()[component].flags():
                raise error.Abort(
                    b"error: %s conflicts with symlink %s "
                    b"in %d." % (path, component, self.p1().rev())
                )
            else:
                raise error.Abort(
                    b"error: '%s' conflicts with file '%s' in "
                    b"%d." % (path, component, self.p1().rev())
                )

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split(b'/')
        for i in pycompat.xrange(len(components)):
            component = b"/".join(components[0:i])
            if component in self:
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't have any paths matching `path/*`).
        match = self.match([path], default=b'path')
        matches = self.p1().manifest().matches(match)
        mfiles = matches.keys()
        if len(mfiles) > 0:
            if len(mfiles) == 1 and mfiles[0] == path:
                return
            # omit the files which are deleted in current IMM wctx
            mfiles = [m for m in mfiles if m in self]
            if not mfiles:
                return
            raise error.Abort(
                b"error: file '%s' cannot be written because "
                b" '%s/' is a directory in %s (containing %d "
                b"entries: %s)"
                % (path, path, self.p1(), len(mfiles), b', '.join(mfiles))
            )
2308
2307
2309 def write(self, path, data, flags=b'', **kwargs):
2308 def write(self, path, data, flags=b'', **kwargs):
2310 if data is None:
2309 if data is None:
2311 raise error.ProgrammingError(b"data must be non-None")
2310 raise error.ProgrammingError(b"data must be non-None")
2312 self._auditconflicts(path)
2311 self._auditconflicts(path)
2313 self._markdirty(
2312 self._markdirty(
2314 path, exists=True, data=data, date=dateutil.makedate(), flags=flags
2313 path, exists=True, data=data, date=dateutil.makedate(), flags=flags
2315 )
2314 )
2316
2315
2317 def setflags(self, path, l, x):
2316 def setflags(self, path, l, x):
2318 flag = b''
2317 flag = b''
2319 if l:
2318 if l:
2320 flag = b'l'
2319 flag = b'l'
2321 elif x:
2320 elif x:
2322 flag = b'x'
2321 flag = b'x'
2323 self._markdirty(path, exists=True, date=dateutil.makedate(), flags=flag)
2322 self._markdirty(path, exists=True, date=dateutil.makedate(), flags=flag)
2324
2323
2325 def remove(self, path):
2324 def remove(self, path):
2326 self._markdirty(path, exists=False)
2325 self._markdirty(path, exists=False)
2327
2326
2328 def exists(self, path):
2327 def exists(self, path):
2329 """exists behaves like `lexists`, but needs to follow symlinks and
2328 """exists behaves like `lexists`, but needs to follow symlinks and
2330 return False if they are broken.
2329 return False if they are broken.
2331 """
2330 """
2332 if self.isdirty(path):
2331 if self.isdirty(path):
2333 # If this path exists and is a symlink, "follow" it by calling
2332 # If this path exists and is a symlink, "follow" it by calling
2334 # exists on the destination path.
2333 # exists on the destination path.
2335 if (
2334 if (
2336 self._cache[path][b'exists']
2335 self._cache[path][b'exists']
2337 and b'l' in self._cache[path][b'flags']
2336 and b'l' in self._cache[path][b'flags']
2338 ):
2337 ):
2339 return self.exists(self._cache[path][b'data'].strip())
2338 return self.exists(self._cache[path][b'data'].strip())
2340 else:
2339 else:
2341 return self._cache[path][b'exists']
2340 return self._cache[path][b'exists']
2342
2341
2343 return self._existsinparent(path)
2342 return self._existsinparent(path)
2344
2343
2345 def lexists(self, path):
2344 def lexists(self, path):
2346 """lexists returns True if the path exists"""
2345 """lexists returns True if the path exists"""
2347 if self.isdirty(path):
2346 if self.isdirty(path):
2348 return self._cache[path][b'exists']
2347 return self._cache[path][b'exists']
2349
2348
2350 return self._existsinparent(path)
2349 return self._existsinparent(path)
2351
2350
2352 def size(self, path):
2351 def size(self, path):
2353 if self.isdirty(path):
2352 if self.isdirty(path):
2354 if self._cache[path][b'exists']:
2353 if self._cache[path][b'exists']:
2355 return len(self._cache[path][b'data'])
2354 return len(self._cache[path][b'data'])
2356 else:
2355 else:
2357 raise error.ProgrammingError(
2356 raise error.ProgrammingError(
2358 b"No such file or directory: %s" % self._path
2357 b"No such file or directory: %s" % self._path
2359 )
2358 )
2360 return self._wrappedctx[path].size()
2359 return self._wrappedctx[path].size()
2361
2360
2362 def tomemctx(
2361 def tomemctx(
2363 self,
2362 self,
2364 text,
2363 text,
2365 branch=None,
2364 branch=None,
2366 extra=None,
2365 extra=None,
2367 date=None,
2366 date=None,
2368 parents=None,
2367 parents=None,
2369 user=None,
2368 user=None,
2370 editor=None,
2369 editor=None,
2371 ):
2370 ):
2372 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2371 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2373 committed.
2372 committed.
2374
2373
2375 ``text`` is the commit message.
2374 ``text`` is the commit message.
2376 ``parents`` (optional) are rev numbers.
2375 ``parents`` (optional) are rev numbers.
2377 """
2376 """
2378 # Default parents to the wrapped context's if not passed.
2377 # Default parents to the wrapped context's if not passed.
2379 if parents is None:
2378 if parents is None:
2380 parents = self._wrappedctx.parents()
2379 parents = self._wrappedctx.parents()
2381 if len(parents) == 1:
2380 if len(parents) == 1:
2382 parents = (parents[0], None)
2381 parents = (parents[0], None)
2383
2382
2384 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2383 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2385 if parents[1] is None:
2384 if parents[1] is None:
2386 parents = (self._repo[parents[0]], None)
2385 parents = (self._repo[parents[0]], None)
2387 else:
2386 else:
2388 parents = (self._repo[parents[0]], self._repo[parents[1]])
2387 parents = (self._repo[parents[0]], self._repo[parents[1]])
2389
2388
2390 files = self.files()
2389 files = self.files()
2391
2390
2392 def getfile(repo, memctx, path):
2391 def getfile(repo, memctx, path):
2393 if self._cache[path][b'exists']:
2392 if self._cache[path][b'exists']:
2394 return memfilectx(
2393 return memfilectx(
2395 repo,
2394 repo,
2396 memctx,
2395 memctx,
2397 path,
2396 path,
2398 self._cache[path][b'data'],
2397 self._cache[path][b'data'],
2399 b'l' in self._cache[path][b'flags'],
2398 b'l' in self._cache[path][b'flags'],
2400 b'x' in self._cache[path][b'flags'],
2399 b'x' in self._cache[path][b'flags'],
2401 self._cache[path][b'copied'],
2400 self._cache[path][b'copied'],
2402 )
2401 )
2403 else:
2402 else:
2404 # Returning None, but including the path in `files`, is
2403 # Returning None, but including the path in `files`, is
2405 # necessary for memctx to register a deletion.
2404 # necessary for memctx to register a deletion.
2406 return None
2405 return None
2407
2406
2408 return memctx(
2407 return memctx(
2409 self._repo,
2408 self._repo,
2410 parents,
2409 parents,
2411 text,
2410 text,
2412 files,
2411 files,
2413 getfile,
2412 getfile,
2414 date=date,
2413 date=date,
2415 extra=extra,
2414 extra=extra,
2416 user=user,
2415 user=user,
2417 branch=branch,
2416 branch=branch,
2418 editor=editor,
2417 editor=editor,
2419 )
2418 )
2420
2419
2421 def isdirty(self, path):
2420 def isdirty(self, path):
2422 return path in self._cache
2421 return path in self._cache
2423
2422
2424 def isempty(self):
2423 def isempty(self):
2425 # We need to discard any keys that are actually clean before the empty
2424 # We need to discard any keys that are actually clean before the empty
2426 # commit check.
2425 # commit check.
2427 self._compact()
2426 self._compact()
2428 return len(self._cache) == 0
2427 return len(self._cache) == 0
2429
2428
2430 def clean(self):
2429 def clean(self):
2431 self._cache = {}
2430 self._cache = {}
2432
2431
2433 def _compact(self):
2432 def _compact(self):
2434 """Removes keys from the cache that are actually clean, by comparing
2433 """Removes keys from the cache that are actually clean, by comparing
2435 them with the underlying context.
2434 them with the underlying context.
2436
2435
2437 This can occur during the merge process, e.g. by passing --tool :local
2436 This can occur during the merge process, e.g. by passing --tool :local
2438 to resolve a conflict.
2437 to resolve a conflict.
2439 """
2438 """
2440 keys = []
2439 keys = []
2441 # This won't be perfect, but can help performance significantly when
2440 # This won't be perfect, but can help performance significantly when
2442 # using things like remotefilelog.
2441 # using things like remotefilelog.
2443 scmutil.prefetchfiles(
2442 scmutil.prefetchfiles(
2444 self.repo(),
2443 self.repo(),
2445 [self.p1().rev()],
2444 [self.p1().rev()],
2446 scmutil.matchfiles(self.repo(), self._cache.keys()),
2445 scmutil.matchfiles(self.repo(), self._cache.keys()),
2447 )
2446 )
2448
2447
2449 for path in self._cache.keys():
2448 for path in self._cache.keys():
2450 cache = self._cache[path]
2449 cache = self._cache[path]
2451 try:
2450 try:
2452 underlying = self._wrappedctx[path]
2451 underlying = self._wrappedctx[path]
2453 if (
2452 if (
2454 underlying.data() == cache[b'data']
2453 underlying.data() == cache[b'data']
2455 and underlying.flags() == cache[b'flags']
2454 and underlying.flags() == cache[b'flags']
2456 ):
2455 ):
2457 keys.append(path)
2456 keys.append(path)
2458 except error.ManifestLookupError:
2457 except error.ManifestLookupError:
2459 # Path not in the underlying manifest (created).
2458 # Path not in the underlying manifest (created).
2460 continue
2459 continue
2461
2460
2462 for path in keys:
2461 for path in keys:
2463 del self._cache[path]
2462 del self._cache[path]
2464 return keys
2463 return keys
2465
2464
2466 def _markdirty(
2465 def _markdirty(
2467 self, path, exists, data=None, date=None, flags=b'', copied=None
2466 self, path, exists, data=None, date=None, flags=b'', copied=None
2468 ):
2467 ):
2469 # data not provided, let's see if we already have some; if not, let's
2468 # data not provided, let's see if we already have some; if not, let's
2470 # grab it from our underlying context, so that we always have data if
2469 # grab it from our underlying context, so that we always have data if
2471 # the file is marked as existing.
2470 # the file is marked as existing.
2472 if exists and data is None:
2471 if exists and data is None:
2473 oldentry = self._cache.get(path) or {}
2472 oldentry = self._cache.get(path) or {}
2474 data = oldentry.get(b'data')
2473 data = oldentry.get(b'data')
2475 if data is None:
2474 if data is None:
2476 data = self._wrappedctx[path].data()
2475 data = self._wrappedctx[path].data()
2477
2476
2478 self._cache[path] = {
2477 self._cache[path] = {
2479 b'exists': exists,
2478 b'exists': exists,
2480 b'data': data,
2479 b'data': data,
2481 b'date': date,
2480 b'date': date,
2482 b'flags': flags,
2481 b'flags': flags,
2483 b'copied': copied,
2482 b'copied': copied,
2484 }
2483 }
2485
2484
2486 def filectx(self, path, filelog=None):
2485 def filectx(self, path, filelog=None):
2487 return overlayworkingfilectx(
2486 return overlayworkingfilectx(
2488 self._repo, path, parent=self, filelog=filelog
2487 self._repo, path, parent=self, filelog=filelog
2489 )
2488 )
2490
2489
2491
2490
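# A hedged usage sketch (not part of this change; ``wctx`` and ``destrev``
# are assumed to exist): an in-memory merge user such as rebase writes into
# the overlay, then materializes a real commit via ``tomemctx``.
#
#     wctx.write(b'a.txt', b'new contents\n')
#     wctx.remove(b'obsolete.txt')
#     if not wctx.isempty():
#         mctx = wctx.tomemctx(b'example message', parents=(destrev, None))
#         newnode = mctx.commit()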
2492 class overlayworkingfilectx(committablefilectx):
2491 class overlayworkingfilectx(committablefilectx):
2493 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2492 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2494 cache, which can be flushed through later by calling ``flush()``."""
2493 cache, which can be flushed through later by calling ``flush()``."""
2495
2494
2496 def __init__(self, repo, path, filelog=None, parent=None):
2495 def __init__(self, repo, path, filelog=None, parent=None):
2497 super(overlayworkingfilectx, self).__init__(repo, path, filelog, parent)
2496 super(overlayworkingfilectx, self).__init__(repo, path, filelog, parent)
2498 self._repo = repo
2497 self._repo = repo
2499 self._parent = parent
2498 self._parent = parent
2500 self._path = path
2499 self._path = path
2501
2500
2502 def cmp(self, fctx):
2501 def cmp(self, fctx):
2503 return self.data() != fctx.data()
2502 return self.data() != fctx.data()
2504
2503
2505 def changectx(self):
2504 def changectx(self):
2506 return self._parent
2505 return self._parent
2507
2506
2508 def data(self):
2507 def data(self):
2509 return self._parent.data(self._path)
2508 return self._parent.data(self._path)
2510
2509
2511 def date(self):
2510 def date(self):
2512 return self._parent.filedate(self._path)
2511 return self._parent.filedate(self._path)
2513
2512
2514 def exists(self):
2513 def exists(self):
2515 return self.lexists()
2514 return self.lexists()
2516
2515
2517 def lexists(self):
2516 def lexists(self):
2518 return self._parent.exists(self._path)
2517 return self._parent.exists(self._path)
2519
2518
2520 def copysource(self):
2519 def copysource(self):
2521 return self._parent.copydata(self._path)
2520 return self._parent.copydata(self._path)
2522
2521
2523 def size(self):
2522 def size(self):
2524 return self._parent.size(self._path)
2523 return self._parent.size(self._path)
2525
2524
2526 def markcopied(self, origin):
2525 def markcopied(self, origin):
2527 self._parent.markcopied(self._path, origin)
2526 self._parent.markcopied(self._path, origin)
2528
2527
2529 def audit(self):
2528 def audit(self):
2530 pass
2529 pass
2531
2530
2532 def flags(self):
2531 def flags(self):
2533 return self._parent.flags(self._path)
2532 return self._parent.flags(self._path)
2534
2533
2535 def setflags(self, islink, isexec):
2534 def setflags(self, islink, isexec):
2536 return self._parent.setflags(self._path, islink, isexec)
2535 return self._parent.setflags(self._path, islink, isexec)
2537
2536
2538 def write(self, data, flags, backgroundclose=False, **kwargs):
2537 def write(self, data, flags, backgroundclose=False, **kwargs):
2539 return self._parent.write(self._path, data, flags, **kwargs)
2538 return self._parent.write(self._path, data, flags, **kwargs)
2540
2539
2541 def remove(self, ignoremissing=False):
2540 def remove(self, ignoremissing=False):
2542 return self._parent.remove(self._path)
2541 return self._parent.remove(self._path)
2543
2542
2544 def clearunknown(self):
2543 def clearunknown(self):
2545 pass
2544 pass
2546
2545
2547
2546
2548 class workingcommitctx(workingctx):
2547 class workingcommitctx(workingctx):
2549 """A workingcommitctx object makes access to data related to
2548 """A workingcommitctx object makes access to data related to
2550 the revision being committed convenient.
2549 the revision being committed convenient.
2551
2550
2552 This hides changes in the working directory, if they aren't
2551 This hides changes in the working directory, if they aren't
2553 committed in this context.
2552 committed in this context.
2554 """
2553 """
2555
2554
2556 def __init__(
2555 def __init__(
2557 self, repo, changes, text=b"", user=None, date=None, extra=None
2556 self, repo, changes, text=b"", user=None, date=None, extra=None
2558 ):
2557 ):
2559 super(workingcommitctx, self).__init__(
2558 super(workingcommitctx, self).__init__(
2560 repo, text, user, date, extra, changes
2559 repo, text, user, date, extra, changes
2561 )
2560 )
2562
2561
2563 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2562 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2564 """Return matched files only in ``self._status``
2563 """Return matched files only in ``self._status``
2565
2564
2566 Uncommitted files appear "clean" via this context, even if
2565 Uncommitted files appear "clean" via this context, even if
2567 they aren't actually so in the working directory.
2566 they aren't actually so in the working directory.
2568 """
2567 """
2569 if clean:
2568 if clean:
2570 clean = [f for f in self._manifest if f not in self._changedset]
2569 clean = [f for f in self._manifest if f not in self._changedset]
2571 else:
2570 else:
2572 clean = []
2571 clean = []
2573 return scmutil.status(
2572 return scmutil.status(
2574 [f for f in self._status.modified if match(f)],
2573 [f for f in self._status.modified if match(f)],
2575 [f for f in self._status.added if match(f)],
2574 [f for f in self._status.added if match(f)],
2576 [f for f in self._status.removed if match(f)],
2575 [f for f in self._status.removed if match(f)],
2577 [],
2576 [],
2578 [],
2577 [],
2579 [],
2578 [],
2580 clean,
2579 clean,
2581 )
2580 )
2582
2581
2583 @propertycache
2582 @propertycache
2584 def _changedset(self):
2583 def _changedset(self):
2585 """Return the set of files changed in this context
2584 """Return the set of files changed in this context
2586 """
2585 """
2587 changed = set(self._status.modified)
2586 changed = set(self._status.modified)
2588 changed.update(self._status.added)
2587 changed.update(self._status.added)
2589 changed.update(self._status.removed)
2588 changed.update(self._status.removed)
2590 return changed
2589 return changed
2591
2590
2592
2591
2593 def makecachingfilectxfn(func):
2592 def makecachingfilectxfn(func):
2594 """Create a filectxfn that caches based on the path.
2593 """Create a filectxfn that caches based on the path.
2595
2594
2596 We can't use util.cachefunc because it uses all arguments as the cache
2595 We can't use util.cachefunc because it uses all arguments as the cache
2597 key and this creates a cycle since the arguments include the repo and
2596 key and this creates a cycle since the arguments include the repo and
2598 memctx.
2597 memctx.
2599 """
2598 """
2600 cache = {}
2599 cache = {}
2601
2600
2602 def getfilectx(repo, memctx, path):
2601 def getfilectx(repo, memctx, path):
2603 if path not in cache:
2602 if path not in cache:
2604 cache[path] = func(repo, memctx, path)
2603 cache[path] = func(repo, memctx, path)
2605 return cache[path]
2604 return cache[path]
2606
2605
2607 return getfilectx
2606 return getfilectx
2608
2607
2609
2608
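# Sketch: an expensive filectxfn (here the hypothetical
# ``expensivefilectxfn``) can be memoized before being handed to consumers,
# so each path is computed at most once:
#
#     filectxfn = makecachingfilectxfn(expensivefilectxfn)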
2610 def memfilefromctx(ctx):
2609 def memfilefromctx(ctx):
2611 """Given a context return a memfilectx for ctx[path]
2610 """Given a context return a memfilectx for ctx[path]
2612
2611
2613 This is a convenience method for building a memctx based on another
2612 This is a convenience method for building a memctx based on another
2614 context.
2613 context.
2615 """
2614 """
2616
2615
2617 def getfilectx(repo, memctx, path):
2616 def getfilectx(repo, memctx, path):
2618 fctx = ctx[path]
2617 fctx = ctx[path]
2619 copysource = fctx.copysource()
2618 copysource = fctx.copysource()
2620 return memfilectx(
2619 return memfilectx(
2621 repo,
2620 repo,
2622 memctx,
2621 memctx,
2623 path,
2622 path,
2624 fctx.data(),
2623 fctx.data(),
2625 islink=fctx.islink(),
2624 islink=fctx.islink(),
2626 isexec=fctx.isexec(),
2625 isexec=fctx.isexec(),
2627 copysource=copysource,
2626 copysource=copysource,
2628 )
2627 )
2629
2628
2630 return getfilectx
2629 return getfilectx
2631
2630
2632
2631
2633 def memfilefrompatch(patchstore):
2632 def memfilefrompatch(patchstore):
2634 """Given a patch (e.g. patchstore object) return a memfilectx
2633 """Given a patch (e.g. patchstore object) return a memfilectx
2635
2634
2636 This is a convenience method for building a memctx based on a patchstore.
2635 This is a convenience method for building a memctx based on a patchstore.
2637 """
2636 """
2638
2637
2639 def getfilectx(repo, memctx, path):
2638 def getfilectx(repo, memctx, path):
2640 data, mode, copysource = patchstore.getfile(path)
2639 data, mode, copysource = patchstore.getfile(path)
2641 if data is None:
2640 if data is None:
2642 return None
2641 return None
2643 islink, isexec = mode
2642 islink, isexec = mode
2644 return memfilectx(
2643 return memfilectx(
2645 repo,
2644 repo,
2646 memctx,
2645 memctx,
2647 path,
2646 path,
2648 data,
2647 data,
2649 islink=islink,
2648 islink=islink,
2650 isexec=isexec,
2649 isexec=isexec,
2651 copysource=copysource,
2650 copysource=copysource,
2652 )
2651 )
2653
2652
2654 return getfilectx
2653 return getfilectx
2655
2654
2656
2655
2657 class memctx(committablectx):
2656 class memctx(committablectx):
2658 """Use memctx to perform in-memory commits via localrepo.commitctx().
2657 """Use memctx to perform in-memory commits via localrepo.commitctx().
2659
2658
2660 Revision information is supplied at initialization time, while
2659 Revision information is supplied at initialization time, while
2661 related file data is made available through a callback
2660 related file data is made available through a callback
2662 mechanism. 'repo' is the current localrepo, 'parents' is a
2661 mechanism. 'repo' is the current localrepo, 'parents' is a
2663 sequence of two parent revision identifiers (pass None for every
2662 sequence of two parent revision identifiers (pass None for every
2664 missing parent), 'text' is the commit message and 'files' lists
2663 missing parent), 'text' is the commit message and 'files' lists
2665 names of files touched by the revision (normalized and relative to
2664 names of files touched by the revision (normalized and relative to
2666 repository root).
2665 repository root).
2667
2666
2668 filectxfn(repo, memctx, path) is a callable receiving the
2667 filectxfn(repo, memctx, path) is a callable receiving the
2669 repository, the current memctx object and the normalized path of
2668 repository, the current memctx object and the normalized path of
2670 requested file, relative to repository root. It is fired by the
2669 requested file, relative to repository root. It is fired by the
2671 commit function for every file in 'files', but the order of calls is
2670 commit function for every file in 'files', but the order of calls is
2672 undefined. If the file is available in the revision being
2671 undefined. If the file is available in the revision being
2673 committed (updated or added), filectxfn returns a memfilectx
2672 committed (updated or added), filectxfn returns a memfilectx
2674 object. If the file was removed, filectxfn returns None for recent
2673 object. If the file was removed, filectxfn returns None for recent
2675 Mercurial. Moved files are represented by marking the source file
2674 Mercurial. Moved files are represented by marking the source file
2676 removed and the new file added with copy information (see
2675 removed and the new file added with copy information (see
2677 memfilectx).
2676 memfilectx).
2678
2677
2679 user receives the committer name and defaults to current
2678 user receives the committer name and defaults to current
2680 repository username, date is the commit date in any format
2679 repository username, date is the commit date in any format
2681 supported by dateutil.parsedate() and defaults to current date, extra
2680 supported by dateutil.parsedate() and defaults to current date, extra
2682 is a dictionary of metadata or is left empty.
2681 is a dictionary of metadata or is left empty.
2683 """
2682 """
2684
2683
2685 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2684 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2686 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2685 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2687 # this field to determine what to do in filectxfn.
2686 # this field to determine what to do in filectxfn.
2688 _returnnoneformissingfiles = True
2687 _returnnoneformissingfiles = True
2689
2688
2690 def __init__(
2689 def __init__(
2691 self,
2690 self,
2692 repo,
2691 repo,
2693 parents,
2692 parents,
2694 text,
2693 text,
2695 files,
2694 files,
2696 filectxfn,
2695 filectxfn,
2697 user=None,
2696 user=None,
2698 date=None,
2697 date=None,
2699 extra=None,
2698 extra=None,
2700 branch=None,
2699 branch=None,
2701 editor=False,
2700 editor=False,
2702 ):
2701 ):
2703 super(memctx, self).__init__(
2702 super(memctx, self).__init__(
2704 repo, text, user, date, extra, branch=branch
2703 repo, text, user, date, extra, branch=branch
2705 )
2704 )
2706 self._rev = None
2705 self._rev = None
2707 self._node = None
2706 self._node = None
2708 parents = [(p or nullid) for p in parents]
2707 parents = [(p or nullid) for p in parents]
2709 p1, p2 = parents
2708 p1, p2 = parents
2710 self._parents = [self._repo[p] for p in (p1, p2)]
2709 self._parents = [self._repo[p] for p in (p1, p2)]
2711 files = sorted(set(files))
2710 files = sorted(set(files))
2712 self._files = files
2711 self._files = files
2713 self.substate = {}
2712 self.substate = {}
2714
2713
2715 if isinstance(filectxfn, patch.filestore):
2714 if isinstance(filectxfn, patch.filestore):
2716 filectxfn = memfilefrompatch(filectxfn)
2715 filectxfn = memfilefrompatch(filectxfn)
2717 elif not callable(filectxfn):
2716 elif not callable(filectxfn):
2718 # if store is not callable, wrap it in a function
2717 # if store is not callable, wrap it in a function
2719 filectxfn = memfilefromctx(filectxfn)
2718 filectxfn = memfilefromctx(filectxfn)
2720
2719
2721 # memoizing increases performance for e.g. vcs convert scenarios.
2720 # memoizing increases performance for e.g. vcs convert scenarios.
2722 self._filectxfn = makecachingfilectxfn(filectxfn)
2721 self._filectxfn = makecachingfilectxfn(filectxfn)
2723
2722
2724 if editor:
2723 if editor:
2725 self._text = editor(self._repo, self, [])
2724 self._text = editor(self._repo, self, [])
2726 self._repo.savecommitmessage(self._text)
2725 self._repo.savecommitmessage(self._text)
2727
2726
2728 def filectx(self, path, filelog=None):
2727 def filectx(self, path, filelog=None):
2729 """get a file context from the working directory
2728 """get a file context from the working directory
2730
2729
2731 Returns None if file doesn't exist and should be removed."""
2730 Returns None if file doesn't exist and should be removed."""
2732 return self._filectxfn(self._repo, self, path)
2731 return self._filectxfn(self._repo, self, path)
2733
2732
2734 def commit(self):
2733 def commit(self):
2735 """commit context to the repo"""
2734 """commit context to the repo"""
2736 return self._repo.commitctx(self)
2735 return self._repo.commitctx(self)
2737
2736
2738 @propertycache
2737 @propertycache
2739 def _manifest(self):
2738 def _manifest(self):
2740 """generate a manifest based on the return values of filectxfn"""
2739 """generate a manifest based on the return values of filectxfn"""
2741
2740
2742 # keep this simple for now; just worry about p1
2741 # keep this simple for now; just worry about p1
2743 pctx = self._parents[0]
2742 pctx = self._parents[0]
2744 man = pctx.manifest().copy()
2743 man = pctx.manifest().copy()
2745
2744
2746 for f in self._status.modified:
2745 for f in self._status.modified:
2747 man[f] = modifiednodeid
2746 man[f] = modifiednodeid
2748
2747
2749 for f in self._status.added:
2748 for f in self._status.added:
2750 man[f] = addednodeid
2749 man[f] = addednodeid
2751
2750
2752 for f in self._status.removed:
2751 for f in self._status.removed:
2753 if f in man:
2752 if f in man:
2754 del man[f]
2753 del man[f]
2755
2754
2756 return man
2755 return man
2757
2756
2758 @propertycache
2757 @propertycache
2759 def _status(self):
2758 def _status(self):
2760 """Calculate exact status from ``files`` specified at construction
2759 """Calculate exact status from ``files`` specified at construction
2761 """
2760 """
2762 man1 = self.p1().manifest()
2761 man1 = self.p1().manifest()
2763 p2 = self._parents[1]
2762 p2 = self._parents[1]
2764 # "1 < len(self._parents)" can't be used for checking
2763 # "1 < len(self._parents)" can't be used for checking
2765 # existence of the 2nd parent, because "memctx._parents" is
2764 # existence of the 2nd parent, because "memctx._parents" is
2766 # explicitly initialized as a list whose length is 2.
2765 # explicitly initialized as a list whose length is 2.
2767 if p2.node() != nullid:
2766 if p2.node() != nullid:
2768 man2 = p2.manifest()
2767 man2 = p2.manifest()
2769 managing = lambda f: f in man1 or f in man2
2768 managing = lambda f: f in man1 or f in man2
2770 else:
2769 else:
2771 managing = lambda f: f in man1
2770 managing = lambda f: f in man1
2772
2771
2773 modified, added, removed = [], [], []
2772 modified, added, removed = [], [], []
2774 for f in self._files:
2773 for f in self._files:
2775 if not managing(f):
2774 if not managing(f):
2776 added.append(f)
2775 added.append(f)
2777 elif self[f]:
2776 elif self[f]:
2778 modified.append(f)
2777 modified.append(f)
2779 else:
2778 else:
2780 removed.append(f)
2779 removed.append(f)
2781
2780
2782 return scmutil.status(modified, added, removed, [], [], [], [])
2781 return scmutil.status(modified, added, removed, [], [], [], [])
2783
2782
2784
2783
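# A minimal sketch of driving memctx directly (``repo`` is assumed to be an
# existing localrepo; this file is mercurial/context.py, so the names are in
# scope): commit one new file without touching the working directory.
#
#     def getfilectx(repo, memctx, path):
#         # return None here instead to record the file as removed
#         return memfilectx(repo, memctx, path, b'hello\n')
#
#     ctx = memctx(
#         repo,
#         (repo[b'.'].node(), None),  # second parent is missing
#         b'in-memory commit example',
#         [b'hello.txt'],
#         getfilectx,
#     )
#     newnode = ctx.commit()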
2785 class memfilectx(committablefilectx):
2784 class memfilectx(committablefilectx):
2786 """memfilectx represents an in-memory file to commit.
2785 """memfilectx represents an in-memory file to commit.
2787
2786
2788 See memctx and committablefilectx for more details.
2787 See memctx and committablefilectx for more details.
2789 """
2788 """
2790
2789
2791 def __init__(
2790 def __init__(
2792 self,
2791 self,
2793 repo,
2792 repo,
2794 changectx,
2793 changectx,
2795 path,
2794 path,
2796 data,
2795 data,
2797 islink=False,
2796 islink=False,
2798 isexec=False,
2797 isexec=False,
2799 copysource=None,
2798 copysource=None,
2800 ):
2799 ):
2801 """
2800 """
2802 path is the normalized file path relative to repository root.
2801 path is the normalized file path relative to repository root.
2803 data is the file content as a string.
2802 data is the file content as a string.
2804 islink is True if the file is a symbolic link.
2803 islink is True if the file is a symbolic link.
2805 isexec is True if the file is executable.
2804 isexec is True if the file is executable.
2806 copysource is the source file path if the current file was copied in the
2805 copysource is the source file path if the current file was copied in the
2807 revision being committed, or None."""
2806 revision being committed, or None."""
2808 super(memfilectx, self).__init__(repo, path, None, changectx)
2807 super(memfilectx, self).__init__(repo, path, None, changectx)
2809 self._data = data
2808 self._data = data
2810 if islink:
2809 if islink:
2811 self._flags = b'l'
2810 self._flags = b'l'
2812 elif isexec:
2811 elif isexec:
2813 self._flags = b'x'
2812 self._flags = b'x'
2814 else:
2813 else:
2815 self._flags = b''
2814 self._flags = b''
2816 self._copysource = copysource
2815 self._copysource = copysource
2817
2816
2818 def copysource(self):
2817 def copysource(self):
2819 return self._copysource
2818 return self._copysource
2820
2819
2821 def cmp(self, fctx):
2820 def cmp(self, fctx):
2822 return self.data() != fctx.data()
2821 return self.data() != fctx.data()
2823
2822
2824 def data(self):
2823 def data(self):
2825 return self._data
2824 return self._data
2826
2825
2827 def remove(self, ignoremissing=False):
2826 def remove(self, ignoremissing=False):
2828 """wraps unlink for a repo's working directory"""
2827 """wraps unlink for a repo's working directory"""
2829 # need to figure out what to do here
2828 # need to figure out what to do here
2830 del self._changectx[self._path]
2829 del self._changectx[self._path]
2831
2830
2832 def write(self, data, flags, **kwargs):
2831 def write(self, data, flags, **kwargs):
2833 """wraps repo.wwrite"""
2832 """wraps repo.wwrite"""
2834 self._data = data
2833 self._data = data
2835
2834
2836
2835
2837 class metadataonlyctx(committablectx):
2836 class metadataonlyctx(committablectx):
2838 """Like memctx but it's reusing the manifest of different commit.
2837 """Like memctx but it's reusing the manifest of different commit.
2839 Intended to be used by lightweight operations that are creating
2838 Intended to be used by lightweight operations that are creating
2840 metadata-only changes.
2839 metadata-only changes.
2841
2840
2842 Revision information is supplied at initialization time. 'repo' is the
2841 Revision information is supplied at initialization time. 'repo' is the
2843 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2842 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2844 'parents' is a sequence of two parent revision identifiers (pass None for
2843 'parents' is a sequence of two parent revision identifiers (pass None for
2845 every missing parent), 'text' is the commit message.
2844 every missing parent), 'text' is the commit message.
2846
2845
2847 user receives the committer name and defaults to current repository
2846 user receives the committer name and defaults to current repository
2848 username, date is the commit date in any format supported by
2847 username, date is the commit date in any format supported by
2849 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2848 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2850 metadata or is left empty.
2849 metadata or is left empty.
2851 """
2850 """
2852
2851
2853 def __init__(
2852 def __init__(
2854 self,
2853 self,
2855 repo,
2854 repo,
2856 originalctx,
2855 originalctx,
2857 parents=None,
2856 parents=None,
2858 text=None,
2857 text=None,
2859 user=None,
2858 user=None,
2860 date=None,
2859 date=None,
2861 extra=None,
2860 extra=None,
2862 editor=False,
2861 editor=False,
2863 ):
2862 ):
2864 if text is None:
2863 if text is None:
2865 text = originalctx.description()
2864 text = originalctx.description()
2866 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2865 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2867 self._rev = None
2866 self._rev = None
2868 self._node = None
2867 self._node = None
2869 self._originalctx = originalctx
2868 self._originalctx = originalctx
2870 self._manifestnode = originalctx.manifestnode()
2869 self._manifestnode = originalctx.manifestnode()
2871 if parents is None:
2870 if parents is None:
2872 parents = originalctx.parents()
2871 parents = originalctx.parents()
2873 else:
2872 else:
2874 parents = [repo[p] for p in parents if p is not None]
2873 parents = [repo[p] for p in parents if p is not None]
2875 parents = parents[:]
2874 parents = parents[:]
2876 while len(parents) < 2:
2875 while len(parents) < 2:
2877 parents.append(repo[nullid])
2876 parents.append(repo[nullid])
2878 p1, p2 = self._parents = parents
2877 p1, p2 = self._parents = parents
2879
2878
2880 # sanity check to ensure that the reused manifest parents are
2879 # sanity check to ensure that the reused manifest parents are
2881 # manifests of our commit parents
2880 # manifests of our commit parents
2882 mp1, mp2 = self.manifestctx().parents
2881 mp1, mp2 = self.manifestctx().parents
2883 if p1 != nullid and p1.manifestnode() != mp1:
2882 if p1 != nullid and p1.manifestnode() != mp1:
2884 raise RuntimeError(
2883 raise RuntimeError(
2885 r"can't reuse the manifest: its p1 "
2884 r"can't reuse the manifest: its p1 "
2886 r"doesn't match the new ctx p1"
2885 r"doesn't match the new ctx p1"
2887 )
2886 )
2888 if p2 != nullid and p2.manifestnode() != mp2:
2887 if p2 != nullid and p2.manifestnode() != mp2:
2889 raise RuntimeError(
2888 raise RuntimeError(
2890 r"can't reuse the manifest: "
2889 r"can't reuse the manifest: "
2891 r"its p2 doesn't match the new ctx p2"
2890 r"its p2 doesn't match the new ctx p2"
2892 )
2891 )
2893
2892
2894 self._files = originalctx.files()
2893 self._files = originalctx.files()
2895 self.substate = {}
2894 self.substate = {}
2896
2895
2897 if editor:
2896 if editor:
2898 self._text = editor(self._repo, self, [])
2897 self._text = editor(self._repo, self, [])
2899 self._repo.savecommitmessage(self._text)
2898 self._repo.savecommitmessage(self._text)
2900
2899
2901 def manifestnode(self):
2900 def manifestnode(self):
2902 return self._manifestnode
2901 return self._manifestnode
2903
2902
2904 @property
2903 @property
2905 def _manifestctx(self):
2904 def _manifestctx(self):
2906 return self._repo.manifestlog[self._manifestnode]
2905 return self._repo.manifestlog[self._manifestnode]
2907
2906
2908 def filectx(self, path, filelog=None):
2907 def filectx(self, path, filelog=None):
2909 return self._originalctx.filectx(path, filelog=filelog)
2908 return self._originalctx.filectx(path, filelog=filelog)
2910
2909
2911 def commit(self):
2910 def commit(self):
2912 """commit context to the repo"""
2911 """commit context to the repo"""
2913 return self._repo.commitctx(self)
2912 return self._repo.commitctx(self)
2914
2913
2915 @property
2914 @property
2916 def _manifest(self):
2915 def _manifest(self):
2917 return self._originalctx.manifest()
2916 return self._originalctx.manifest()
2918
2917
2919 @propertycache
2918 @propertycache
2920 def _status(self):
2919 def _status(self):
2921 """Calculate exact status from ``files`` specified in the ``origctx``
2920 """Calculate exact status from ``files`` specified in the ``origctx``
2922 and parents manifests.
2921 and parents manifests.
2923 """
2922 """
2924 man1 = self.p1().manifest()
2923 man1 = self.p1().manifest()
2925 p2 = self._parents[1]
2924 p2 = self._parents[1]
2926 # "1 < len(self._parents)" can't be used for checking
2925 # "1 < len(self._parents)" can't be used for checking
2927 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2926 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2928 # explicitly initialized as a list whose length is 2.
2927 # explicitly initialized as a list whose length is 2.
2929 if p2.node() != nullid:
2928 if p2.node() != nullid:
2930 man2 = p2.manifest()
2929 man2 = p2.manifest()
2931 managing = lambda f: f in man1 or f in man2
2930 managing = lambda f: f in man1 or f in man2
2932 else:
2931 else:
2933 managing = lambda f: f in man1
2932 managing = lambda f: f in man1
2934
2933
2935 modified, added, removed = [], [], []
2934 modified, added, removed = [], [], []
2936 for f in self._files:
2935 for f in self._files:
2937 if not managing(f):
2936 if not managing(f):
2938 added.append(f)
2937 added.append(f)
2939 elif f in self:
2938 elif f in self:
2940 modified.append(f)
2939 modified.append(f)
2941 else:
2940 else:
2942 removed.append(f)
2941 removed.append(f)
2943
2942
2944 return scmutil.status(modified, added, removed, [], [], [], [])
2943 return scmutil.status(modified, added, removed, [], [], [], [])
2945
2944
2946
2945
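# A hedged sketch: rewrite only the metadata of the working directory parent
# while reusing its manifest, as lightweight history-rewriting operations do.
# The new user value is illustrative.
#
#     old = repo[b'.']
#     new = metadataonlyctx(
#         repo,
#         old,
#         text=old.description(),
#         user=b'new author <new@example.com>',
#         date=old.date(),
#     )
#     newnode = new.commit()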
2947 class arbitraryfilectx(object):
2946 class arbitraryfilectx(object):
2948 """Allows you to use filectx-like functions on a file in an arbitrary
2947 """Allows you to use filectx-like functions on a file in an arbitrary
2949 location on disk, possibly not in the working directory.
2948 location on disk, possibly not in the working directory.
2950 """
2949 """
2951
2950
2952 def __init__(self, path, repo=None):
2951 def __init__(self, path, repo=None):
2953 # Repo is optional because contrib/simplemerge uses this class.
2952 # Repo is optional because contrib/simplemerge uses this class.
2954 self._repo = repo
2953 self._repo = repo
2955 self._path = path
2954 self._path = path
2956
2955
2957 def cmp(self, fctx):
2956 def cmp(self, fctx):
2958 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2957 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2959 # path if either side is a symlink.
2958 # path if either side is a symlink.
2960 symlinks = b'l' in self.flags() or b'l' in fctx.flags()
2959 symlinks = b'l' in self.flags() or b'l' in fctx.flags()
2961 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2960 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2962 # Add a fast-path for merge if both sides are disk-backed.
2961 # Add a fast-path for merge if both sides are disk-backed.
2963 # Note that filecmp uses the opposite return values (True if same)
2962 # Note that filecmp uses the opposite return values (True if same)
2964 # from our cmp functions (True if different).
2963 # from our cmp functions (True if different).
2965 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2964 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2966 return self.data() != fctx.data()
2965 return self.data() != fctx.data()
2967
2966
2968 def path(self):
2967 def path(self):
2969 return self._path
2968 return self._path
2970
2969
2971 def flags(self):
2970 def flags(self):
2972 return b''
2971 return b''
2973
2972
2974 def data(self):
2973 def data(self):
2975 return util.readfile(self._path)
2974 return util.readfile(self._path)
2976
2975
2977 def decodeddata(self):
2976 def decodeddata(self):
2978 with open(self._path, b"rb") as f:
2977 with open(self._path, b"rb") as f:
2979 return f.read()
2978 return f.read()
2980
2979
2981 def remove(self):
2980 def remove(self):
2982 util.unlink(self._path)
2981 util.unlink(self._path)
2983
2982
2984 def write(self, data, flags, **kwargs):
2983 def write(self, data, flags, **kwargs):
2985 assert not flags
2984 assert not flags
2986 with open(self._path, b"wb") as f:
2985 with open(self._path, b"wb") as f:
2987 f.write(data)
2986 f.write(data)
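# A brief sketch (paths are illustrative): compare a file outside the
# repository with a working-directory file; ``cmp`` returns True when the
# contents differ, using the filecmp fast path when both are disk-backed.
#
#     fctx = arbitraryfilectx(b'/tmp/candidate', repo=repo)
#     changed = fctx.cmp(repo[None][b'a.txt'])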
@@ -1,2265 +1,2268 b''
1 # manifest.py - manifest revision class for mercurial
1 # manifest.py - manifest revision class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import heapq
10 import heapq
11 import itertools
11 import itertools
12 import struct
12 import struct
13 import weakref
13 import weakref
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 bin,
17 bin,
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 )
21 )
22 from .pycompat import getattr
22 from .pycompat import getattr
23 from . import (
23 from . import (
24 error,
24 error,
25 mdiff,
25 mdiff,
26 policy,
26 policy,
27 pycompat,
27 pycompat,
28 revlog,
28 revlog,
29 util,
29 util,
30 )
30 )
31 from .interfaces import (
31 from .interfaces import (
32 repository,
32 repository,
33 util as interfaceutil,
33 util as interfaceutil,
34 )
34 )
35
35
36 parsers = policy.importmod(r'parsers')
36 parsers = policy.importmod(r'parsers')
37 propertycache = util.propertycache
37 propertycache = util.propertycache
38
38
39 # Allow tests to more easily test the alternate path in manifestdict.fastdelta()
39 # Allow tests to more easily test the alternate path in manifestdict.fastdelta()
40 FASTDELTA_TEXTDIFF_THRESHOLD = 1000
40 FASTDELTA_TEXTDIFF_THRESHOLD = 1000
41
41
42
42
43 def _parse(data):
43 def _parse(data):
44 # This method does a little bit of excessive-looking
44 # This method does a little bit of excessive-looking
45 # precondition checking. This is so that the behavior of this
45 # precondition checking. This is so that the behavior of this
46 # class exactly matches its C counterpart to try and help
46 # class exactly matches its C counterpart to try and help
47 # prevent surprise breakage for anyone that develops against
47 # prevent surprise breakage for anyone that develops against
48 # the pure version.
48 # the pure version.
49 if data and data[-1:] != b'\n':
49 if data and data[-1:] != b'\n':
50 raise ValueError(b'Manifest did not end in a newline.')
50 raise ValueError(b'Manifest did not end in a newline.')
51 prev = None
51 prev = None
52 for l in data.splitlines():
52 for l in data.splitlines():
53 if prev is not None and prev > l:
53 if prev is not None and prev > l:
54 raise ValueError(b'Manifest lines not in sorted order.')
54 raise ValueError(b'Manifest lines not in sorted order.')
55 prev = l
55 prev = l
56 f, n = l.split(b'\0')
56 f, n = l.split(b'\0')
57 if len(n) > 40:
57 if len(n) > 40:
58 yield f, bin(n[:40]), n[40:]
58 yield f, bin(n[:40]), n[40:]
59 else:
59 else:
60 yield f, bin(n), b''
60 yield f, bin(n), b''
61
61
62
62
63 def _text(it):
63 def _text(it):
64 files = []
64 files = []
65 lines = []
65 lines = []
66 for f, n, fl in it:
66 for f, n, fl in it:
67 files.append(f)
67 files.append(f)
68 # if this is changed to support newlines in filenames,
68 # if this is changed to support newlines in filenames,
69 # be sure to check the templates/ dir again (especially *-raw.tmpl)
69 # be sure to check the templates/ dir again (especially *-raw.tmpl)
70 lines.append(b"%s\0%s%s\n" % (f, hex(n), fl))
70 lines.append(b"%s\0%s%s\n" % (f, hex(n), fl))
71
71
72 _checkforbidden(files)
72 _checkforbidden(files)
73 return b''.join(lines)
73 return b''.join(lines)
74
74
75
75
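# A small sketch of the manifest encoding handled by _parse()/_text() above:
# one "<path>\0<40 hex digits><flags>\n" line per file, sorted by path.
#
#     data = b'a.txt\x00' + b'1' * 40 + b'\n' + b'b.sh\x00' + b'2' * 40 + b'x\n'
#     entries = list(_parse(data))   # [(b'a.txt', node, b''), (b'b.sh', node, b'x')]
#     assert _text(entries) == data  # the two functions round-trip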
76 class lazymanifestiter(object):
76 class lazymanifestiter(object):
77 def __init__(self, lm):
77 def __init__(self, lm):
78 self.pos = 0
78 self.pos = 0
79 self.lm = lm
79 self.lm = lm
80
80
81 def __iter__(self):
81 def __iter__(self):
82 return self
82 return self
83
83
84 def next(self):
84 def next(self):
85 try:
85 try:
86 data, pos = self.lm._get(self.pos)
86 data, pos = self.lm._get(self.pos)
87 except IndexError:
87 except IndexError:
88 raise StopIteration
88 raise StopIteration
89 if pos == -1:
89 if pos == -1:
90 self.pos += 1
90 self.pos += 1
91 return data[0]
91 return data[0]
92 self.pos += 1
92 self.pos += 1
93 zeropos = data.find(b'\x00', pos)
93 zeropos = data.find(b'\x00', pos)
94 return data[pos:zeropos]
94 return data[pos:zeropos]
95
95
96 __next__ = next
96 __next__ = next
97
97
98
98
99 class lazymanifestiterentries(object):
99 class lazymanifestiterentries(object):
100 def __init__(self, lm):
100 def __init__(self, lm):
101 self.lm = lm
101 self.lm = lm
102 self.pos = 0
102 self.pos = 0
103
103
104 def __iter__(self):
104 def __iter__(self):
105 return self
105 return self
106
106
107 def next(self):
107 def next(self):
108 try:
108 try:
109 data, pos = self.lm._get(self.pos)
109 data, pos = self.lm._get(self.pos)
110 except IndexError:
110 except IndexError:
111 raise StopIteration
111 raise StopIteration
112 if pos == -1:
112 if pos == -1:
113 self.pos += 1
113 self.pos += 1
114 return data
114 return data
115 zeropos = data.find(b'\x00', pos)
115 zeropos = data.find(b'\x00', pos)
116 hashval = unhexlify(data, self.lm.extrainfo[self.pos], zeropos + 1, 40)
116 hashval = unhexlify(data, self.lm.extrainfo[self.pos], zeropos + 1, 40)
117 flags = self.lm._getflags(data, self.pos, zeropos)
117 flags = self.lm._getflags(data, self.pos, zeropos)
118 self.pos += 1
118 self.pos += 1
119 return (data[pos:zeropos], hashval, flags)
119 return (data[pos:zeropos], hashval, flags)
120
120
121 __next__ = next
121 __next__ = next
122
122
123
123
124 def unhexlify(data, extra, pos, length):
124 def unhexlify(data, extra, pos, length):
125 s = bin(data[pos : pos + length])
125 s = bin(data[pos : pos + length])
126 if extra:
126 if extra:
127 s += chr(extra & 0xFF)
127 s += chr(extra & 0xFF)
128 return s
128 return s
129
129
130
130
131 def _cmp(a, b):
131 def _cmp(a, b):
132 return (a > b) - (a < b)
132 return (a > b) - (a < b)
133
133
134
134
135 class _lazymanifest(object):
135 class _lazymanifest(object):
136 """A pure python manifest backed by a byte string. It is supplimented with
136 """A pure python manifest backed by a byte string. It is supplimented with
137 internal lists as it is modified, until it is compacted back to a pure byte
137 internal lists as it is modified, until it is compacted back to a pure byte
138 string.
138 string.
139
139
140 ``data`` is the initial manifest data.
140 ``data`` is the initial manifest data.
141
141
142 ``positions`` is a list of offsets, one per manifest entry. Positive
142 ``positions`` is a list of offsets, one per manifest entry. Positive
143 values are offsets into ``data``, negative values are offsets into the
143 values are offsets into ``data``, negative values are offsets into the
144 ``extradata`` list. When an entry is removed, it is dropped from
144 ``extradata`` list. When an entry is removed, it is dropped from
145 ``positions``. The values are encoded such that when walking the list and
145 ``positions``. The values are encoded such that when walking the list and
146 indexing into ``data`` or ``extradata`` as appropriate, the entries are
146 indexing into ``data`` or ``extradata`` as appropriate, the entries are
147 sorted by filename.
147 sorted by filename.
148
148
149 ``extradata`` is a list of (key, hash, flags) for entries that were added or
149 ``extradata`` is a list of (key, hash, flags) for entries that were added or
150 modified since the manifest was created or compacted.
150 modified since the manifest was created or compacted.
151 """
151 """
152
152
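# Illustration (offsets are hypothetical): overwriting an entry of a freshly
# parsed manifest appends to ``extradata`` and flips its position negative,
# e.g. positions [0, 12] becomes [0, -1] with extradata [(key, node, flags)].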
153 def __init__(
153 def __init__(
154 self,
154 self,
155 data,
155 data,
156 positions=None,
156 positions=None,
157 extrainfo=None,
157 extrainfo=None,
158 extradata=None,
158 extradata=None,
159 hasremovals=False,
159 hasremovals=False,
160 ):
160 ):
161 if positions is None:
161 if positions is None:
162 self.positions = self.findlines(data)
162 self.positions = self.findlines(data)
163 self.extrainfo = [0] * len(self.positions)
163 self.extrainfo = [0] * len(self.positions)
164 self.data = data
164 self.data = data
165 self.extradata = []
165 self.extradata = []
166 self.hasremovals = False
166 self.hasremovals = False
167 else:
167 else:
168 self.positions = positions[:]
168 self.positions = positions[:]
169 self.extrainfo = extrainfo[:]
169 self.extrainfo = extrainfo[:]
170 self.extradata = extradata[:]
170 self.extradata = extradata[:]
171 self.data = data
171 self.data = data
172 self.hasremovals = hasremovals
172 self.hasremovals = hasremovals
173
173
174 def findlines(self, data):
174 def findlines(self, data):
175 if not data:
175 if not data:
176 return []
176 return []
177 pos = data.find(b"\n")
177 pos = data.find(b"\n")
178 if pos == -1 or data[-1:] != b'\n':
178 if pos == -1 or data[-1:] != b'\n':
179 raise ValueError(b"Manifest did not end in a newline.")
179 raise ValueError(b"Manifest did not end in a newline.")
180 positions = [0]
180 positions = [0]
181 prev = data[: data.find(b'\x00')]
181 prev = data[: data.find(b'\x00')]
182 while pos < len(data) - 1 and pos != -1:
182 while pos < len(data) - 1 and pos != -1:
183 positions.append(pos + 1)
183 positions.append(pos + 1)
184 nexts = data[pos + 1 : data.find(b'\x00', pos + 1)]
184 nexts = data[pos + 1 : data.find(b'\x00', pos + 1)]
185 if nexts < prev:
185 if nexts < prev:
186 raise ValueError(b"Manifest lines not in sorted order.")
186 raise ValueError(b"Manifest lines not in sorted order.")
187 prev = nexts
187 prev = nexts
188 pos = data.find(b"\n", pos + 1)
188 pos = data.find(b"\n", pos + 1)
189 return positions
189 return positions
190
190
191 def _get(self, index):
191 def _get(self, index):
192 # get the position encoded in pos:
192 # get the position encoded in pos:
193 # positive number is an index in 'data'
193 # positive number is an index in 'data'
194 # negative number is in extrapieces
194 # negative number is in extrapieces
195 pos = self.positions[index]
195 pos = self.positions[index]
196 if pos >= 0:
196 if pos >= 0:
197 return self.data, pos
197 return self.data, pos
198 return self.extradata[-pos - 1], -1
198 return self.extradata[-pos - 1], -1
199
199
200 def _getkey(self, pos):
200 def _getkey(self, pos):
201 if pos >= 0:
201 if pos >= 0:
202 return self.data[pos : self.data.find(b'\x00', pos + 1)]
202 return self.data[pos : self.data.find(b'\x00', pos + 1)]
203 return self.extradata[-pos - 1][0]
203 return self.extradata[-pos - 1][0]
204
204
205 def bsearch(self, key):
205 def bsearch(self, key):
206 first = 0
206 first = 0
207 last = len(self.positions) - 1
207 last = len(self.positions) - 1
208
208
209 while first <= last:
209 while first <= last:
210 midpoint = (first + last) // 2
210 midpoint = (first + last) // 2
211 nextpos = self.positions[midpoint]
211 nextpos = self.positions[midpoint]
212 candidate = self._getkey(nextpos)
212 candidate = self._getkey(nextpos)
213 r = _cmp(key, candidate)
213 r = _cmp(key, candidate)
214 if r == 0:
214 if r == 0:
215 return midpoint
215 return midpoint
216 else:
216 else:
217 if r < 0:
217 if r < 0:
218 last = midpoint - 1
218 last = midpoint - 1
219 else:
219 else:
220 first = midpoint + 1
220 first = midpoint + 1
221 return -1
221 return -1
222
222
223 def bsearch2(self, key):
223 def bsearch2(self, key):
224 # same as the above, but will always return the position
224 # same as the above, but will always return the position
225 # done for performance reasons
225 # done for performance reasons
226 first = 0
226 first = 0
227 last = len(self.positions) - 1
227 last = len(self.positions) - 1
228
228
229 while first <= last:
229 while first <= last:
230 midpoint = (first + last) // 2
230 midpoint = (first + last) // 2
231 nextpos = self.positions[midpoint]
231 nextpos = self.positions[midpoint]
232 candidate = self._getkey(nextpos)
232 candidate = self._getkey(nextpos)
233 r = _cmp(key, candidate)
233 r = _cmp(key, candidate)
234 if r == 0:
234 if r == 0:
235 return (midpoint, True)
235 return (midpoint, True)
236 else:
236 else:
237 if r < 0:
237 if r < 0:
238 last = midpoint - 1
238 last = midpoint - 1
239 else:
239 else:
240 first = midpoint + 1
240 first = midpoint + 1
241 return (first, False)
241 return (first, False)
242
242
243 def __contains__(self, key):
243 def __contains__(self, key):
244 return self.bsearch(key) != -1
244 return self.bsearch(key) != -1
245
245
246 def _getflags(self, data, needle, pos):
246 def _getflags(self, data, needle, pos):
247 start = pos + 41
247 start = pos + 41
248 end = data.find(b"\n", start)
248 end = data.find(b"\n", start)
249 if end == -1:
249 if end == -1:
250 end = len(data) - 1
250 end = len(data) - 1
251 if start == end:
251 if start == end:
252 return b''
252 return b''
253 return self.data[start:end]
253 return self.data[start:end]
254
254
255 def __getitem__(self, key):
255 def __getitem__(self, key):
256 if not isinstance(key, bytes):
256 if not isinstance(key, bytes):
257 raise TypeError(b"getitem: manifest keys must be a bytes.")
257 raise TypeError(b"getitem: manifest keys must be a bytes.")
258 needle = self.bsearch(key)
258 needle = self.bsearch(key)
259 if needle == -1:
259 if needle == -1:
260 raise KeyError
260 raise KeyError
261 data, pos = self._get(needle)
261 data, pos = self._get(needle)
262 if pos == -1:
262 if pos == -1:
263 return (data[1], data[2])
263 return (data[1], data[2])
264 zeropos = data.find(b'\x00', pos)
264 zeropos = data.find(b'\x00', pos)
265 assert 0 <= needle <= len(self.positions)
265 assert 0 <= needle <= len(self.positions)
266 assert len(self.extrainfo) == len(self.positions)
266 assert len(self.extrainfo) == len(self.positions)
267 hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
267 hashval = unhexlify(data, self.extrainfo[needle], zeropos + 1, 40)
268 flags = self._getflags(data, needle, zeropos)
268 flags = self._getflags(data, needle, zeropos)
269 return (hashval, flags)
269 return (hashval, flags)
270
270
271 def __delitem__(self, key):
271 def __delitem__(self, key):
272 needle, found = self.bsearch2(key)
272 needle, found = self.bsearch2(key)
273 if not found:
273 if not found:
274 raise KeyError
274 raise KeyError
275 cur = self.positions[needle]
275 cur = self.positions[needle]
276 self.positions = self.positions[:needle] + self.positions[needle + 1 :]
276 self.positions = self.positions[:needle] + self.positions[needle + 1 :]
277 self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1 :]
277 self.extrainfo = self.extrainfo[:needle] + self.extrainfo[needle + 1 :]
278 if cur >= 0:
278 if cur >= 0:
279 # This does NOT unsort the list as far as the search functions are
279 # This does NOT unsort the list as far as the search functions are
280 # concerned, as they only examine lines mapped by self.positions.
280 # concerned, as they only examine lines mapped by self.positions.
281 self.data = self.data[:cur] + b'\x00' + self.data[cur + 1 :]
281 self.data = self.data[:cur] + b'\x00' + self.data[cur + 1 :]
282 self.hasremovals = True
282 self.hasremovals = True
283
283
284 def __setitem__(self, key, value):
284 def __setitem__(self, key, value):
285 if not isinstance(key, bytes):
285 if not isinstance(key, bytes):
286 raise TypeError(b"setitem: manifest keys must be a byte string.")
286 raise TypeError(b"setitem: manifest keys must be a byte string.")
287 if not isinstance(value, tuple) or len(value) != 2:
287 if not isinstance(value, tuple) or len(value) != 2:
288 raise TypeError(
288 raise TypeError(
289 b"Manifest values must be a tuple of (node, flags)."
289 b"Manifest values must be a tuple of (node, flags)."
290 )
290 )
291 hashval = value[0]
291 hashval = value[0]
292 if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
292 if not isinstance(hashval, bytes) or not 20 <= len(hashval) <= 22:
293 raise TypeError(b"node must be a 20-byte byte string")
293 raise TypeError(b"node must be a 20-byte byte string")
294 flags = value[1]
294 flags = value[1]
295 if len(hashval) == 22:
295 if len(hashval) == 22:
296 hashval = hashval[:-1]
296 hashval = hashval[:-1]
297 if not isinstance(flags, bytes) or len(flags) > 1:
297 if not isinstance(flags, bytes) or len(flags) > 1:
298 raise TypeError(b"flags must a 0 or 1 byte string, got %r", flags)
298 raise TypeError(b"flags must a 0 or 1 byte string, got %r", flags)
299 needle, found = self.bsearch2(key)
299 needle, found = self.bsearch2(key)
300 if found:
300 if found:
301 # put the item
301 # put the item
302 pos = self.positions[needle]
302 pos = self.positions[needle]
303 if pos < 0:
303 if pos < 0:
304 self.extradata[-pos - 1] = (key, hashval, value[1])
304 self.extradata[-pos - 1] = (key, hashval, value[1])
305 else:
305 else:
306 # just don't bother
306 # just don't bother
307 self.extradata.append((key, hashval, value[1]))
307 self.extradata.append((key, hashval, value[1]))
308 self.positions[needle] = -len(self.extradata)
308 self.positions[needle] = -len(self.extradata)
309 else:
309 else:
310 # not found, put it in with extra positions
310 # not found, put it in with extra positions
311 self.extradata.append((key, hashval, value[1]))
311 self.extradata.append((key, hashval, value[1]))
312 self.positions = (
312 self.positions = (
313 self.positions[:needle]
313 self.positions[:needle]
314 + [-len(self.extradata)]
314 + [-len(self.extradata)]
315 + self.positions[needle:]
315 + self.positions[needle:]
316 )
316 )
317 self.extrainfo = (
317 self.extrainfo = (
318 self.extrainfo[:needle] + [0] + self.extrainfo[needle:]
318 self.extrainfo[:needle] + [0] + self.extrainfo[needle:]
319 )
319 )

    def copy(self):
        # XXX call _compact like in C?
        return _lazymanifest(
            self.data,
            self.positions,
            self.extrainfo,
            self.extradata,
            self.hasremovals,
        )

    def _compact(self):
        # hopefully not called TOO often
        if len(self.extradata) == 0 and not self.hasremovals:
            return
        l = []
        i = 0
        offset = 0
        self.extrainfo = [0] * len(self.positions)
        while i < len(self.positions):
            if self.positions[i] >= 0:
                cur = self.positions[i]
                last_cut = cur

                # Collect all contiguous entries in the buffer at the current
                # offset, breaking out only for added/modified items held in
                # extradata, or a deleted line prior to the next position.
                while True:
                    self.positions[i] = offset
                    i += 1
                    if i == len(self.positions) or self.positions[i] < 0:
                        break

                    # A removed file has no positions[] entry, but does have an
                    # overwritten first byte.  Break out and find the end of the
                    # current good entry/entries if there is a removed file
                    # before the next position.
                    if (
                        self.hasremovals
                        and self.data.find(b'\n\x00', cur, self.positions[i])
                        != -1
                    ):
                        break

                    offset += self.positions[i] - cur
                    cur = self.positions[i]
                end_cut = self.data.find(b'\n', cur)
                if end_cut != -1:
                    end_cut += 1
                offset += end_cut - cur
                l.append(self.data[last_cut:end_cut])
            else:
                while i < len(self.positions) and self.positions[i] < 0:
                    cur = self.positions[i]
                    t = self.extradata[-cur - 1]
                    l.append(self._pack(t))
                    self.positions[i] = offset
                    if len(t[1]) > 20:
                        # the extra annotation byte, if any, is the node's
                        # 21st byte (index 20, not 21); ord() of a one-byte
                        # slice works on both Python 2 and 3
                        self.extrainfo[i] = ord(t[1][20:21])
                    offset += len(l[-1])
                    i += 1
        self.data = b''.join(l)
        self.hasremovals = False
        self.extradata = []
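
    # Editorial sketch (added note): compaction rebuilds self.data so that
    # pending extradata entries are packed into fresh b"path\x00hexflags\n"
    # lines, lines deleted in place (first byte overwritten with \x00) are
    # dropped, and self.positions is rewritten to index the new buffer.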

    def _pack(self, d):
        return d[0] + b'\x00' + hex(d[1][:20]) + d[2] + b'\n'

    def text(self):
        self._compact()
        return self.data

    def diff(self, m2, clean=False):
        '''Finds changes between the current manifest and m2.'''
        # XXX think whether efficiency matters here
        diff = {}

        for fn, e1, flags in self.iterentries():
            if fn not in m2:
                diff[fn] = (e1, flags), (None, b'')
            else:
                e2 = m2[fn]
                if (e1, flags) != e2:
                    diff[fn] = (e1, flags), e2
                elif clean:
                    diff[fn] = None

        for fn, e2, flags in m2.iterentries():
            if fn not in self:
                diff[fn] = (None, b''), (e2, flags)

        return diff

    def iterentries(self):
        return lazymanifestiterentries(self)

    def iterkeys(self):
        return lazymanifestiter(self)

    def __iter__(self):
        return lazymanifestiter(self)

    def __len__(self):
        return len(self.positions)

    def filtercopy(self, filterfn):
        # XXX should be optimized
        c = _lazymanifest(b'')
        for f, n, fl in self.iterentries():
            if filterfn(f):
                c[f] = n, fl
        return c


try:
    _lazymanifest = parsers.lazymanifest
except AttributeError:
    pass
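
# Added note: when the C extension is present, parsers.lazymanifest replaces
# the pure-Python _lazymanifest defined above; the AttributeError branch keeps
# the pure implementation as the fallback for pure-Python installs.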


@interfaceutil.implementer(repository.imanifestdict)
class manifestdict(object):
    def __init__(self, data=b''):
        self._lm = _lazymanifest(data)

    def __getitem__(self, key):
        return self._lm[key][0]

    def find(self, key):
        return self._lm[key]

    def __len__(self):
        return len(self._lm)

    def __nonzero__(self):
        # nonzero is covered by the __len__ function, but implementing it here
        # makes it easier for extensions to override.
        return len(self._lm) != 0

    __bool__ = __nonzero__

    def __setitem__(self, key, node):
        self._lm[key] = node, self.flags(key, b'')

    def __contains__(self, key):
        if key is None:
            return False
        return key in self._lm

    def __delitem__(self, key):
        del self._lm[key]

    def __iter__(self):
        return self._lm.__iter__()

    def iterkeys(self):
        return self._lm.iterkeys()

    def keys(self):
        return list(self.iterkeys())

    def filesnotin(self, m2, match=None):
        '''Set of files in this manifest that are not in the other'''
        if match:
            m1 = self.matches(match)
            m2 = m2.matches(match)
            return m1.filesnotin(m2)
        diff = self.diff(m2)
        files = set(
            filepath
            for filepath, hashflags in pycompat.iteritems(diff)
            if hashflags[1][0] is None
        )
        return files
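
    # Editorial sketch (hypothetical values): if this manifest has files
    # {b'a', b'b'} and m2 has {b'b'}, filesnotin(m2) returns {b'a'}; entries
    # present in both manifests are excluded even if node or flags differ.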

    @propertycache
    def _dirs(self):
        return util.dirs(self)

    def dirs(self):
        return self._dirs

    def hasdir(self, dir):
        return dir in self._dirs

    def _filesfastpath(self, match):
        '''Checks whether we can correctly and quickly iterate over matcher
        files instead of over manifest files.'''
        files = match.files()
        return len(files) < 100 and (
            match.isexact()
            or (match.prefix() and all(fn in self for fn in files))
        )

    def walk(self, match):
        '''Generates matching file names.

        Equivalent to manifest.matches(match).iterkeys(), but without creating
        an entirely new manifest.

        It also reports nonexistent files by marking them bad with match.bad().
        '''
        if match.always():
            for f in iter(self):
                yield f
            return

        fset = set(match.files())

        # avoid the entire walk if we're only looking for specific files
        if self._filesfastpath(match):
            for fn in sorted(fset):
                yield fn
            return

        for fn in self:
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            if match(fn):
                yield fn

        # for dirstate.walk, files=[''] means "walk the whole tree".
        # follow that here, too
        fset.discard(b'')

        for fn in sorted(fset):
            if not self.hasdir(fn):
                match.bad(fn, None)

    def matches(self, match):
        '''generate a new manifest filtered by the match argument'''
        if match.always():
            return self.copy()

        if self._filesfastpath(match):
            m = manifestdict()
            lm = self._lm
            for fn in match.files():
                if fn in lm:
                    m._lm[fn] = lm[fn]
            return m

        m = manifestdict()
        m._lm = self._lm.filtercopy(match)
        return m

    def diff(self, m2, match=None, clean=False):
        '''Finds changes between the current manifest and m2.

        Args:
          m2: the manifest to which this manifest should be compared.
          clean: if true, include files unchanged between these manifests
                 with a None value in the returned dictionary.

        The result is returned as a dict with filename as key and
        values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
        nodeid in the current/other manifest and fl1/fl2 is the flag
        in the current/other manifest. Where the file does not exist,
        the nodeid will be None and the flags will be the empty
        string.
        '''
        if match:
            m1 = self.matches(match)
            m2 = m2.matches(match)
            return m1.diff(m2, clean=clean)
        return self._lm.diff(m2._lm, clean)
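
    # Editorial sketch of the return shape (hypothetical nodes n1/n2):
    #   {b'changed':   ((n1, b''), (n2, b'')),    # differs between manifests
    #    b'only-here': ((n1, b''), (None, b''))}  # missing from m2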

    def setflag(self, key, flag):
        self._lm[key] = self[key], flag

    def get(self, key, default=None):
        try:
            return self._lm[key][0]
        except KeyError:
            return default

    def flags(self, key, default=b''):
        try:
            return self._lm[key][1]
        except KeyError:
            return default

    def copy(self):
        c = manifestdict()
        c._lm = self._lm.copy()
        return c

    def items(self):
        return (x[:2] for x in self._lm.iterentries())

    def iteritems(self):
        return (x[:2] for x in self._lm.iterentries())

    def iterentries(self):
        return self._lm.iterentries()

    def text(self):
        # most likely uses native version
        return self._lm.text()

    def fastdelta(self, base, changes):
        """Given a base manifest text as a bytearray and a list of changes
        relative to that text, compute a delta that can be used by revlog.
        """
        delta = []
        dstart = None
        dend = None
        dline = [b""]
        start = 0
        # zero copy representation of base as a buffer
        addbuf = util.buffer(base)

        changes = list(changes)
        if len(changes) < FASTDELTA_TEXTDIFF_THRESHOLD:
            # start with a readonly loop that finds the offset of
            # each line and creates the deltas
            for f, todelete in changes:
                # start:end will either bracket the existing item or give the
                # insert point
                start, end = _msearch(addbuf, f, start)
                if not todelete:
                    h, fl = self._lm[f]
                    l = b"%s\0%s%s\n" % (f, hex(h), fl)
                else:
                    if start == end:
                        # item we want to delete was not found, error out
                        raise AssertionError(
                            _(b"failed to remove %s from manifest") % f
                        )
                    l = b""
                if dstart is not None and dstart <= start and dend >= start:
                    if dend < end:
                        dend = end
                    if l:
                        dline.append(l)
                else:
                    if dstart is not None:
                        delta.append([dstart, dend, b"".join(dline)])
                    dstart = start
                    dend = end
                    dline = [l]

            if dstart is not None:
                delta.append([dstart, dend, b"".join(dline)])
            # apply the delta to the base, and get a delta for addrevision
            deltatext, arraytext = _addlistdelta(base, delta)
        else:
            # For large changes, it's much cheaper to just build the text and
            # diff it.
            arraytext = bytearray(self.text())
            deltatext = mdiff.textdiff(
                util.buffer(base), util.buffer(arraytext)
            )

        return arraytext, deltatext
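
    # Added note on the delta format: each element of `delta` above is a
    # [start, end, replacement-bytes] edit against the base text;
    # _addlistdelta below both applies the edits and serializes them with
    # >lll (start, end, length) headers for revlog.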


def _msearch(m, s, lo=0, hi=None):
    '''return a tuple (start, end) that says where to find s within m.

    If the string is found, m[start:end] is the line containing
    that string.  If start == end the string was not found and
    they indicate the proper sorted insertion point.

    m should be a buffer, a memoryview or a byte string.
    s is a byte string'''

    def advance(i, c):
        while i < lenm and m[i : i + 1] != c:
            i += 1
        return i

    if not s:
        return (lo, lo)
    lenm = len(m)
    if not hi:
        hi = lenm
    while lo < hi:
        mid = (lo + hi) // 2
        start = mid
        while start > 0 and m[start - 1 : start] != b'\n':
            start -= 1
        end = advance(start, b'\0')
        if bytes(m[start:end]) < s:
            # we know that after the null there are 40 bytes of sha1
            # this translates to the bisect lo = mid + 1
            lo = advance(end + 40, b'\n') + 1
        else:
            # this translates to the bisect hi = mid
            hi = start
    end = advance(lo, b'\0')
    found = m[lo:end]
    if s == found:
        # we know that after the null there are 40 bytes of sha1
        end = advance(end + 40, b'\n')
        return (lo, end + 1)
    else:
        return (lo, lo)
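
# Editorial sketch (hypothetical buffer): with
#   m = b'bar\x00' + b'0' * 40 + b'\n' + b'foo\x00' + b'1' * 40 + b'\n'
# _msearch(m, b'foo') brackets the b'foo' line, while _msearch(m, b'baz')
# returns (lo, lo) pointing at the sorted insertion point.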


def _checkforbidden(l):
    """Check filenames for illegal characters."""
    for f in l:
        if b'\n' in f or b'\r' in f:
            raise error.StorageError(
                _(b"'\\n' and '\\r' disallowed in filenames: %r")
                % pycompat.bytestr(f)
            )


# apply the changes collected during the bisect loop to our addlist
# return a delta suitable for addrevision
def _addlistdelta(addlist, x):
    # for large addlist arrays, building a new array is cheaper
    # than repeatedly modifying the existing one
    currentposition = 0
    newaddlist = bytearray()

    for start, end, content in x:
        newaddlist += addlist[currentposition:start]
        if content:
            newaddlist += bytearray(content)

        currentposition = end

    newaddlist += addlist[currentposition:]

    deltatext = b"".join(
        struct.pack(b">lll", start, end, len(content)) + content
        for start, end, content in x
    )
    return deltatext, newaddlist


def _splittopdir(f):
    if b'/' in f:
        dir, subpath = f.split(b'/', 1)
        return dir + b'/', subpath
    else:
        return b'', f
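
# For example, _splittopdir(b'a/b/c') returns (b'a/', b'b/c'), while
# _splittopdir(b'top') returns (b'', b'top').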


_noop = lambda s: None


class treemanifest(object):
    def __init__(self, dir=b'', text=b''):
        self._dir = dir
        self._node = nullid
        self._loadfunc = _noop
        self._copyfunc = _noop
        self._dirty = False
        self._dirs = {}
        self._lazydirs = {}
        # Using _lazymanifest here is a little slower than plain old dicts
        self._files = {}
        self._flags = {}
        if text:

            def readsubtree(subdir, subm):
                raise AssertionError(
                    b'treemanifest constructor only accepts flat manifests'
                )

            self.parse(text, readsubtree)
            self._dirty = True  # Mark flat manifest dirty after parsing

    def _subpath(self, path):
        return self._dir + path

    def _loadalllazy(self):
        selfdirs = self._dirs
        for d, (path, node, readsubtree, docopy) in pycompat.iteritems(
            self._lazydirs
        ):
            if docopy:
                selfdirs[d] = readsubtree(path, node).copy()
            else:
                selfdirs[d] = readsubtree(path, node)
        self._lazydirs = {}

    def _loadlazy(self, d):
        v = self._lazydirs.get(d)
        if v:
            path, node, readsubtree, docopy = v
            if docopy:
                self._dirs[d] = readsubtree(path, node).copy()
            else:
                self._dirs[d] = readsubtree(path, node)
            del self._lazydirs[d]

    def _loadchildrensetlazy(self, visit):
        if not visit:
            return None
        if visit == b'all' or visit == b'this':
            self._loadalllazy()
            return None

        loadlazy = self._loadlazy
        for k in visit:
            loadlazy(k + b'/')
        return visit

    def _loaddifflazy(self, t1, t2):
        """load items in t1 and t2 if they're needed for diffing.

        The current criteria are:
        - if it's not present in _lazydirs in either t1 or t2, load it in the
          other (it may already be loaded or it may not exist, doesn't matter)
        - if it's present in _lazydirs in both, compare the nodeid; if it
          differs, load it in both
        """
        toloadlazy = []
        for d, v1 in pycompat.iteritems(t1._lazydirs):
            v2 = t2._lazydirs.get(d)
            if not v2 or v2[1] != v1[1]:
                toloadlazy.append(d)
        for d, v1 in pycompat.iteritems(t2._lazydirs):
            if d not in t1._lazydirs:
                toloadlazy.append(d)

        for d in toloadlazy:
            t1._loadlazy(d)
            t2._loadlazy(d)
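
    # Added note: each _lazydirs value is a (path, node, readsubtree, docopy)
    # tuple; parse() stores docopy=False so the cached subtree returned by
    # readsubtree is used directly, while copies flip it to True so the
    # subtree is duplicated when it is finally loaded.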

    def __len__(self):
        self._load()
        size = len(self._files)
        self._loadalllazy()
        for m in self._dirs.values():
            size += m.__len__()
        return size

    def __nonzero__(self):
        # Faster than "__len__() != 0" since it avoids loading sub-manifests
        return not self._isempty()

    __bool__ = __nonzero__

    def _isempty(self):
        self._load()  # for consistency; already loaded by all callers
        # See if we can skip loading everything.
        if self._files or (
            self._dirs and any(not m._isempty() for m in self._dirs.values())
        ):
            return False
        self._loadalllazy()
        return not self._dirs or all(m._isempty() for m in self._dirs.values())

    def __repr__(self):
        return (
            b'<treemanifest dir=%s, node=%s, loaded=%s, dirty=%s at 0x%x>'
            % (
                self._dir,
                hex(self._node),
                bool(self._loadfunc is _noop),
                self._dirty,
                id(self),
            )
        )

    def dir(self):
        '''The directory that this tree manifest represents, including a
        trailing '/'. Empty string for the repo root directory.'''
        return self._dir

    def node(self):
        '''The node of this instance. nullid for unsaved instances. Should
        be updated when the instance is read or written from a revlog.
        '''
        assert not self._dirty
        return self._node

    def setnode(self, node):
        self._node = node
        self._dirty = False

    def iterentries(self):
        self._load()
        self._loadalllazy()
        for p, n in sorted(
            itertools.chain(self._dirs.items(), self._files.items())
        ):
            if p in self._files:
                yield self._subpath(p), n, self._flags.get(p, b'')
            else:
                for x in n.iterentries():
                    yield x

    def items(self):
        self._load()
        self._loadalllazy()
        for p, n in sorted(
            itertools.chain(self._dirs.items(), self._files.items())
        ):
            if p in self._files:
                yield self._subpath(p), n
            else:
                for f, sn in pycompat.iteritems(n):
                    yield f, sn

    iteritems = items

    def iterkeys(self):
        self._load()
        self._loadalllazy()
        for p in sorted(itertools.chain(self._dirs, self._files)):
            if p in self._files:
                yield self._subpath(p)
            else:
                for f in self._dirs[p]:
                    yield f

    def keys(self):
        return list(self.iterkeys())

    def __iter__(self):
        return self.iterkeys()

    def __contains__(self, f):
        if f is None:
            return False
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            if dir not in self._dirs:
                return False

            return self._dirs[dir].__contains__(subpath)
        else:
            return f in self._files

    def get(self, f, default=None):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            if dir not in self._dirs:
                return default
            return self._dirs[dir].get(subpath, default)
        else:
            return self._files.get(f, default)

    def __getitem__(self, f):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            return self._dirs[dir].__getitem__(subpath)
        else:
            return self._files[f]

    def flags(self, f):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            if dir not in self._dirs:
                return b''
            return self._dirs[dir].flags(subpath)
        else:
            if f in self._lazydirs or f in self._dirs:
                return b''
            return self._flags.get(f, b'')

    def find(self, f):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            return self._dirs[dir].find(subpath)
        else:
            return self._files[f], self._flags.get(f, b'')

    def __delitem__(self, f):
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)

            self._dirs[dir].__delitem__(subpath)
            # If the directory is now empty, remove it
            if self._dirs[dir]._isempty():
                del self._dirs[dir]
        else:
            del self._files[f]
            if f in self._flags:
                del self._flags[f]
        self._dirty = True

    def __setitem__(self, f, n):
        assert n is not None
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)
            if dir not in self._dirs:
                self._dirs[dir] = treemanifest(self._subpath(dir))
            self._dirs[dir].__setitem__(subpath, n)
        else:
            self._files[f] = n[:21]  # to match manifestdict's behavior
        self._dirty = True

    def _load(self):
        if self._loadfunc is not _noop:
            lf, self._loadfunc = self._loadfunc, _noop
            lf(self)
        elif self._copyfunc is not _noop:
            cf, self._copyfunc = self._copyfunc, _noop
            cf(self)

    def setflag(self, f, flags):
        """Set the flags (symlink, executable) for path f."""
        self._load()
        dir, subpath = _splittopdir(f)
        if dir:
            self._loadlazy(dir)
            if dir not in self._dirs:
                self._dirs[dir] = treemanifest(self._subpath(dir))
            self._dirs[dir].setflag(subpath, flags)
        else:
            self._flags[f] = flags
        self._dirty = True

    def copy(self):
        copy = treemanifest(self._dir)
        copy._node = self._node
        copy._dirty = self._dirty
        if self._copyfunc is _noop:

            def _copyfunc(s):
                self._load()
                s._lazydirs = {
                    d: (p, n, r, True)
                    for d, (p, n, r, c) in pycompat.iteritems(self._lazydirs)
                }
                sdirs = s._dirs
                for d, v in pycompat.iteritems(self._dirs):
                    sdirs[d] = v.copy()
                s._files = dict.copy(self._files)
                s._flags = dict.copy(self._flags)

            if self._loadfunc is _noop:
                _copyfunc(copy)
            else:
                copy._copyfunc = _copyfunc
        else:
            copy._copyfunc = self._copyfunc
        return copy
1072
1075
1073 def filesnotin(self, m2, match=None):
1076 def filesnotin(self, m2, match=None):
1074 '''Set of files in this manifest that are not in the other'''
1077 '''Set of files in this manifest that are not in the other'''
1075 if match and not match.always():
1078 if match and not match.always():
1076 m1 = self.matches(match)
1079 m1 = self.matches(match)
1077 m2 = m2.matches(match)
1080 m2 = m2.matches(match)
1078 return m1.filesnotin(m2)
1081 return m1.filesnotin(m2)
1079
1082
1080 files = set()
1083 files = set()
1081
1084
1082 def _filesnotin(t1, t2):
1085 def _filesnotin(t1, t2):
1083 if t1._node == t2._node and not t1._dirty and not t2._dirty:
1086 if t1._node == t2._node and not t1._dirty and not t2._dirty:
1084 return
1087 return
1085 t1._load()
1088 t1._load()
1086 t2._load()
1089 t2._load()
1087 self._loaddifflazy(t1, t2)
1090 self._loaddifflazy(t1, t2)
1088 for d, m1 in pycompat.iteritems(t1._dirs):
1091 for d, m1 in pycompat.iteritems(t1._dirs):
1089 if d in t2._dirs:
1092 if d in t2._dirs:
1090 m2 = t2._dirs[d]
1093 m2 = t2._dirs[d]
1091 _filesnotin(m1, m2)
1094 _filesnotin(m1, m2)
1092 else:
1095 else:
1093 files.update(m1.iterkeys())
1096 files.update(m1.iterkeys())
1094
1097
1095 for fn in t1._files:
1098 for fn in t1._files:
1096 if fn not in t2._files:
1099 if fn not in t2._files:
1097 files.add(t1._subpath(fn))
1100 files.add(t1._subpath(fn))
1098
1101
1099 _filesnotin(self, m2)
1102 _filesnotin(self, m2)
1100 return files
1103 return files
1101
1104
1102 @propertycache
1105 @propertycache
1103 def _alldirs(self):
1106 def _alldirs(self):
1104 return util.dirs(self)
1107 return util.dirs(self)
1105
1108
1106 def dirs(self):
1109 def dirs(self):
1107 return self._alldirs
1110 return self._alldirs
1108
1111
1109 def hasdir(self, dir):
1112 def hasdir(self, dir):
1110 self._load()
1113 self._load()
1111 topdir, subdir = _splittopdir(dir)
1114 topdir, subdir = _splittopdir(dir)
1112 if topdir:
1115 if topdir:
1113 self._loadlazy(topdir)
1116 self._loadlazy(topdir)
1114 if topdir in self._dirs:
1117 if topdir in self._dirs:
1115 return self._dirs[topdir].hasdir(subdir)
1118 return self._dirs[topdir].hasdir(subdir)
1116 return False
1119 return False
1117 dirslash = dir + b'/'
1120 dirslash = dir + b'/'
1118 return dirslash in self._dirs or dirslash in self._lazydirs
1121 return dirslash in self._dirs or dirslash in self._lazydirs

    def walk(self, match):
        '''Generates matching file names.

        Equivalent to manifest.matches(match).iterkeys(), but without creating
        an entirely new manifest.

        It also reports nonexistent files by marking them bad with match.bad().
        '''
        if match.always():
            for f in iter(self):
                yield f
            return

        fset = set(match.files())

        for fn in self._walk(match):
            if fn in fset:
                # specified pattern is the exact name
                fset.remove(fn)
            yield fn

        # for dirstate.walk, files=[''] means "walk the whole tree".
        # follow that here, too
        fset.discard(b'')

        for fn in sorted(fset):
            if not self.hasdir(fn):
                match.bad(fn, None)

    def _walk(self, match):
        '''Recursively generates matching file names for walk().'''
        visit = match.visitchildrenset(self._dir[:-1])
        if not visit:
            return

        # yield this dir's files and walk its submanifests
        self._load()
        visit = self._loadchildrensetlazy(visit)
        for p in sorted(list(self._dirs) + list(self._files)):
            if p in self._files:
                fullp = self._subpath(p)
                if match(fullp):
                    yield fullp
            else:
                if not visit or p[:-1] in visit:
                    for f in self._dirs[p]._walk(match):
                        yield f

    def matches(self, match):
        '''generate a new manifest filtered by the match argument'''
        if match.always():
            return self.copy()

        return self._matches(match)

    def _matches(self, match):
        '''recursively generate a new manifest filtered by the match argument.
        '''

        visit = match.visitchildrenset(self._dir[:-1])
        if visit == b'all':
            return self.copy()
        ret = treemanifest(self._dir)
        if not visit:
            return ret

        self._load()
        for fn in self._files:
            # While visitchildrenset *usually* lists only subdirs, this is
            # actually up to the matcher and may have some files in the set().
            # If visit == 'this', we should obviously look at the files in this
            # directory; if visit is a set, and fn is in it, we should inspect
            # fn (but no need to inspect things not in the set).
            if visit != b'this' and fn not in visit:
                continue
            fullp = self._subpath(fn)
            # visitchildrenset isn't perfect, we still need to call the regular
            # matcher code to further filter results.
            if not match(fullp):
                continue
            ret._files[fn] = self._files[fn]
            if fn in self._flags:
                ret._flags[fn] = self._flags[fn]

        visit = self._loadchildrensetlazy(visit)
        for dir, subm in pycompat.iteritems(self._dirs):
            if visit and dir[:-1] not in visit:
                continue
            m = subm._matches(match)
            if not m._isempty():
                ret._dirs[dir] = m

        if not ret._isempty():
            ret._dirty = True
        return ret

    def diff(self, m2, match=None, clean=False):
        '''Finds changes between the current manifest and m2.

        Args:
          m2: the manifest to which this manifest should be compared.
          clean: if true, include files unchanged between these manifests
                 with a None value in the returned dictionary.

        The result is returned as a dict with filename as key and
        values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the
        nodeid in the current/other manifest and fl1/fl2 is the flag
        in the current/other manifest. Where the file does not exist,
        the nodeid will be None and the flags will be the empty
        string.
        '''
        if match and not match.always():
            m1 = self.matches(match)
            m2 = m2.matches(match)
            return m1.diff(m2, clean=clean)
        result = {}
        emptytree = treemanifest()

        def _iterativediff(t1, t2, stack):
            """compares two tree manifests and appends any sub-manifests that
            need to be compared onto the stack"""
            if t1._node == t2._node and not t1._dirty and not t2._dirty:
                return
            t1._load()
            t2._load()
            self._loaddifflazy(t1, t2)

            for d, m1 in pycompat.iteritems(t1._dirs):
                m2 = t2._dirs.get(d, emptytree)
                stack.append((m1, m2))

            for d, m2 in pycompat.iteritems(t2._dirs):
                if d not in t1._dirs:
                    stack.append((emptytree, m2))

            for fn, n1 in pycompat.iteritems(t1._files):
                fl1 = t1._flags.get(fn, b'')
                n2 = t2._files.get(fn, None)
                fl2 = t2._flags.get(fn, b'')
                if n1 != n2 or fl1 != fl2:
                    result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2))
                elif clean:
                    result[t1._subpath(fn)] = None

            for fn, n2 in pycompat.iteritems(t2._files):
                if fn not in t1._files:
                    fl2 = t2._flags.get(fn, b'')
                    result[t2._subpath(fn)] = ((None, b''), (n2, fl2))

        stackls = []
        _iterativediff(self, m2, stackls)
        while stackls:
            t1, t2 = stackls.pop()
            # stackls is populated in the function call
            _iterativediff(t1, t2, stackls)
        return result
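
    # Added note: the diff walks the two trees with an explicit stack instead
    # of recursing, presumably so that deep directory hierarchies cannot hit
    # Python's recursion limit.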

    def unmodifiedsince(self, m2):
        return not self._dirty and not m2._dirty and self._node == m2._node

    def parse(self, text, readsubtree):
        selflazy = self._lazydirs
        subpath = self._subpath
        for f, n, fl in _parse(text):
            if fl == b't':
                f = f + b'/'
                # False below means "doesn't need to be copied" and can use the
                # cached value from readsubtree directly.
                selflazy[f] = (subpath(f), n, readsubtree, False)
            elif b'/' in f:
                # This is a flat manifest, so use __setitem__ and setflag rather
                # than assigning directly to _files and _flags, so we can
                # assign a path in a subdirectory, and to mark dirty (compared
                # to nullid).
                self[f] = n
                if fl:
                    self.setflag(f, fl)
            else:
                # Assigning to _files and _flags avoids marking as dirty,
                # and should be a little faster.
                self._files[f] = n
                if fl:
                    self._flags[f] = fl

    def text(self):
        """Get the full data of this manifest as a bytestring."""
        self._load()
        return _text(self.iterentries())

    def dirtext(self):
        """Get the full data of this directory as a bytestring. Make sure that
        any submanifests have been written first, so their nodeids are correct.
        """
        self._load()
        flags = self.flags
        lazydirs = [
            (d[:-1], v[1], b't') for d, v in pycompat.iteritems(self._lazydirs)
        ]
        dirs = [(d[:-1], self._dirs[d]._node, b't') for d in self._dirs]
        files = [(f, self._files[f], flags(f)) for f in self._files]
        return _text(sorted(dirs + files + lazydirs))
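
    # Added note: directory entries are serialized with flag b't' and the
    # node of the sub-manifest, e.g. b"subdir\x00<40 hex digits>t\n",
    # alongside the regular file lines.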
1321
1324
1322 def read(self, gettext, readsubtree):
1325 def read(self, gettext, readsubtree):
1323 def _load_for_read(s):
1326 def _load_for_read(s):
1324 s.parse(gettext(), readsubtree)
1327 s.parse(gettext(), readsubtree)
1325 s._dirty = False
1328 s._dirty = False
1326
1329
1327 self._loadfunc = _load_for_read
1330 self._loadfunc = _load_for_read
1328
1331
1329 def writesubtrees(self, m1, m2, writesubtree, match):
1332 def writesubtrees(self, m1, m2, writesubtree, match):
1330 self._load() # for consistency; should never have any effect here
1333 self._load() # for consistency; should never have any effect here
1331 m1._load()
1334 m1._load()
1332 m2._load()
1335 m2._load()
1333 emptytree = treemanifest()
1336 emptytree = treemanifest()
1334
1337
1335 def getnode(m, d):
1338 def getnode(m, d):
1336 ld = m._lazydirs.get(d)
1339 ld = m._lazydirs.get(d)
1337 if ld:
1340 if ld:
1338 return ld[1]
1341 return ld[1]
1339 return m._dirs.get(d, emptytree)._node
1342 return m._dirs.get(d, emptytree)._node

        # let's skip investigating things that `match` says we do not need.
        visit = match.visitchildrenset(self._dir[:-1])
        visit = self._loadchildrensetlazy(visit)
        if visit == b'this' or visit == b'all':
            visit = None
        for d, subm in pycompat.iteritems(self._dirs):
            if visit and d[:-1] not in visit:
                continue
            subp1 = getnode(m1, d)
            subp2 = getnode(m2, d)
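            # If the first parent is the null node, swap the parents below so
            # that a real parent comes first; revlog deltas are typically
            # computed against p1.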
            if subp1 == nullid:
                subp1, subp2 = subp2, subp1
            writesubtree(subm, subp1, subp2, match)

    def walksubtrees(self, matcher=None):
        """Returns an iterator of the subtrees of this manifest, including this
        manifest itself.

        If `matcher` is provided, it only returns subtrees that match.
        """
        if matcher and not matcher.visitdir(self._dir[:-1]):
            return
        if not matcher or matcher(self._dir[:-1]):
            yield self

        self._load()
        # OPT: use visitchildrenset to avoid loading everything.
        self._loadalllazy()
        for d, subm in pycompat.iteritems(self._dirs):
            for subtree in subm.walksubtrees(matcher=matcher):
                yield subtree


class manifestfulltextcache(util.lrucachedict):
    """File-backed LRU cache for the manifest cache

    File consists of entries, up to EOF:

    - 20 bytes node, 4 bytes length, <length> manifest data

    These are written in reverse cache order (oldest to newest).

    """
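    # A minimal sketch of one on-disk record, assuming a 20-byte node `n` and
    # a manifest fulltext `t` (bytes):
    #
    #     record = n + struct.pack(b'>L', len(t)) + t
    #
    # read() below parses exactly this layout until EOF.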

    _file = b'manifestfulltextcache'

    def __init__(self, max):
        super(manifestfulltextcache, self).__init__(max)
        self._dirty = False
        self._read = False
        self._opener = None

    def read(self):
        if self._read or self._opener is None:
            return

        try:
            with self._opener(self._file) as fp:
                set = super(manifestfulltextcache, self).__setitem__
                # Ignore trailing data: this is a cache, so corrupt or
                # truncated entries are simply skipped.
                while True:
                    node = fp.read(20)
                    if len(node) < 20:
                        break
                    try:
                        size = struct.unpack(b'>L', fp.read(4))[0]
                    except struct.error:
                        break
                    value = bytearray(fp.read(size))
                    if len(value) != size:
                        break
                    set(node, value)
        except IOError:
            # the file is allowed to be missing
            pass

        self._read = True
        self._dirty = False

    def write(self):
        if not self._dirty or self._opener is None:
            return
        # rotate backwards to the first used node
        with self._opener(
            self._file, b'w', atomictemp=True, checkambig=True
        ) as fp:
            node = self._head.prev
            while True:
                if node.key in self._cache:
                    fp.write(node.key)
                    fp.write(struct.pack(b'>L', len(node.value)))
                    fp.write(node.value)
                if node is self._head:
                    break
                node = node.prev

    def __len__(self):
        if not self._read:
            self.read()
        return super(manifestfulltextcache, self).__len__()

    def __contains__(self, k):
        if not self._read:
            self.read()
        return super(manifestfulltextcache, self).__contains__(k)

    def __iter__(self):
        if not self._read:
            self.read()
        return super(manifestfulltextcache, self).__iter__()

    def __getitem__(self, k):
        if not self._read:
            self.read()
        # the cache LRU order can change on read
        setdirty = self._cache.get(k) is not self._head
        value = super(manifestfulltextcache, self).__getitem__(k)
        if setdirty:
            self._dirty = True
        return value

    def __setitem__(self, k, v):
        if not self._read:
            self.read()
        super(manifestfulltextcache, self).__setitem__(k, v)
        self._dirty = True

    def __delitem__(self, k):
        if not self._read:
            self.read()
        super(manifestfulltextcache, self).__delitem__(k)
        self._dirty = True

    def get(self, k, default=None):
        if not self._read:
            self.read()
        return super(manifestfulltextcache, self).get(k, default=default)

    def clear(self, clear_persisted_data=False):
        super(manifestfulltextcache, self).clear()
        if clear_persisted_data:
            self._dirty = True
            self.write()
        self._read = False


# An upper bound of what we expect from compression
# (the real-life value seems to be about 3)
MAXCOMPRESSION = 3
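# MAXCOMPRESSION is handed to the revlog as `upperboundcomp` in the
# constructor below, presumably letting its size heuristics assume a fulltext
# never compresses by more than this factor (an inference, not documented in
# the original).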


@interfaceutil.implementer(repository.imanifeststorage)
class manifestrevlog(object):
    '''A revlog that stores manifest texts. This is responsible for caching
    the full-text manifest contents.
    '''

    def __init__(
        self,
        opener,
        tree=b'',
        dirlogcache=None,
        indexfile=None,
        treemanifest=False,
    ):
        """Constructs a new manifest revlog

        `indexfile` - used by extensions to have two manifests at once, like
        when transitioning between flat manifests and treemanifests.

        `treemanifest` - used to indicate this is a tree manifest revlog.
        Opener options can also be used to make this a tree manifest revlog.
        The opener option takes precedence, so if it is set to True, we ignore
        whatever value is passed in to the constructor.
        """
        # During normal operations, we expect to deal with not more than four
        # revs at a time (such as during commit --amend). When rebasing large
        # stacks of commits, the number can go up, hence the config knob below.
        cachesize = 4
        optiontreemanifest = False
        opts = getattr(opener, 'options', None)
        if opts is not None:
            cachesize = opts.get(b'manifestcachesize', cachesize)
            optiontreemanifest = opts.get(b'treemanifest', False)

        self._treeondisk = optiontreemanifest or treemanifest

        self._fulltextcache = manifestfulltextcache(cachesize)

        if tree:
            assert self._treeondisk, b'opts is %r' % opts

        if indexfile is None:
            indexfile = b'00manifest.i'
            if tree:
                indexfile = b"meta/" + tree + indexfile

        self.tree = tree

        # The dirlogcache is kept on the root manifest log
        if tree:
            self._dirlogcache = dirlogcache
        else:
            self._dirlogcache = {b'': self}

        self._revlog = revlog.revlog(
            opener,
            indexfile,
            # only root indexfile is cached
            checkambig=not bool(tree),
            mmaplargeindex=True,
            upperboundcomp=MAXCOMPRESSION,
        )

        self.index = self._revlog.index
        self.version = self._revlog.version
        self._generaldelta = self._revlog._generaldelta

    def _setupmanifestcachehooks(self, repo):
        """Persist the manifestfulltextcache on lock release"""
        if not util.safehasattr(repo, b'_wlockref'):
            return

        self._fulltextcache._opener = repo.wcachevfs
        if repo._currentlock(repo._wlockref) is None:
            return

        reporef = weakref.ref(repo)
        manifestrevlogref = weakref.ref(self)
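        # Hold only weak references here: the after-lock callback below must
        # not keep the repo or this revlog object alive beyond their normal
        # lifetime.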

        def persistmanifestcache():
            repo = reporef()
            self = manifestrevlogref()
            if repo is None or self is None:
                return
            if repo.manifestlog.getstorage(b'') is not self:
                # there's a different manifest in play now, abort
                return
            self._fulltextcache.write()

        repo._afterlock(persistmanifestcache)

    @property
    def fulltextcache(self):
        return self._fulltextcache

    def clearcaches(self, clear_persisted_data=False):
        self._revlog.clearcaches()
        self._fulltextcache.clear(clear_persisted_data=clear_persisted_data)
        self._dirlogcache = {self.tree: self}

    def dirlog(self, d):
        if d:
            assert self._treeondisk
        if d not in self._dirlogcache:
            mfrevlog = manifestrevlog(
                self.opener, d, self._dirlogcache, treemanifest=self._treeondisk
            )
            self._dirlogcache[d] = mfrevlog
        return self._dirlogcache[d]

    def add(
        self,
        m,
        transaction,
        link,
        p1,
        p2,
        added,
        removed,
        readtree=None,
        match=None,
    ):
        if p1 in self.fulltextcache and util.safehasattr(m, b'fastdelta'):
            # If our first parent is in the manifest cache, we can
            # compute a delta here using properties we know about the
            # manifest up-front, which may save time later for the
            # revlog layer.

            _checkforbidden(added)
            # combine the changed lists into one sorted iterator
            work = heapq.merge(
                [(x, False) for x in sorted(added)],
                [(x, True) for x in sorted(removed)],
            )
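            # `work` yields (path, removed) pairs in sorted path order, the
            # shape fastdelta() expects when applying changes to the cached
            # p1 text.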

            arraytext, deltatext = m.fastdelta(self.fulltextcache[p1], work)
            cachedelta = self._revlog.rev(p1), deltatext
            text = util.buffer(arraytext)
            n = self._revlog.addrevision(
                text, transaction, link, p1, p2, cachedelta
            )
        else:
            # The first parent manifest isn't already loaded, so we'll
            # just encode a fulltext of the manifest and pass that
            # through to the revlog layer, and let it handle the delta
            # process.
            if self._treeondisk:
                assert readtree, b"readtree must be set for treemanifest writes"
                assert match, b"match must be specified for treemanifest writes"
                m1 = readtree(self.tree, p1)
                m2 = readtree(self.tree, p2)
                n = self._addtree(
                    m, transaction, link, m1, m2, readtree, match=match
                )
                arraytext = None
            else:
                text = m.text()
                n = self._revlog.addrevision(text, transaction, link, p1, p2)
                arraytext = bytearray(text)

        if arraytext is not None:
            self.fulltextcache[n] = arraytext

        return n

    def _addtree(self, m, transaction, link, m1, m2, readtree, match):
        # If the manifest is unchanged compared to one parent,
        # don't write a new revision
        if self.tree != b'' and (
            m.unmodifiedsince(m1) or m.unmodifiedsince(m2)
        ):
            return m.node()

        def writesubtree(subm, subp1, subp2, match):
            sublog = self.dirlog(subm.dir())
            sublog.add(
                subm,
                transaction,
                link,
                subp1,
                subp2,
                None,
                None,
                readtree=readtree,
                match=match,
            )

        m.writesubtrees(m1, m2, writesubtree, match)
        text = m.dirtext()
        n = None
        if self.tree != b'':
            # Double-check whether contents are unchanged relative to one
            # parent
            if text == m1.dirtext():
                n = m1.node()
            elif text == m2.dirtext():
                n = m2.node()

        if not n:
            n = self._revlog.addrevision(
                text, transaction, link, m1.node(), m2.node()
            )

        # Save nodeid so parent manifest can calculate its nodeid
        m.setnode(n)
        return n

    def __len__(self):
        return len(self._revlog)

    def __iter__(self):
        return self._revlog.__iter__()

    def rev(self, node):
        return self._revlog.rev(node)

    def node(self, rev):
        return self._revlog.node(rev)

    def lookup(self, value):
        return self._revlog.lookup(value)

    def parentrevs(self, rev):
        return self._revlog.parentrevs(rev)

    def parents(self, node):
        return self._revlog.parents(node)

    def linkrev(self, rev):
        return self._revlog.linkrev(rev)

    def checksize(self):
        return self._revlog.checksize()

    def revision(self, node, _df=None, raw=False):
        return self._revlog.revision(node, _df=_df, raw=raw)

    def rawdata(self, node, _df=None):
        return self._revlog.rawdata(node, _df=_df)

    def revdiff(self, rev1, rev2):
        return self._revlog.revdiff(rev1, rev2)

    def cmp(self, node, text):
        return self._revlog.cmp(node, text)

    def deltaparent(self, rev):
        return self._revlog.deltaparent(rev)

    def emitrevisions(
        self,
        nodes,
        nodesorder=None,
        revisiondata=False,
        assumehaveparentrevisions=False,
        deltamode=repository.CG_DELTAMODE_STD,
    ):
        return self._revlog.emitrevisions(
            nodes,
            nodesorder=nodesorder,
            revisiondata=revisiondata,
            assumehaveparentrevisions=assumehaveparentrevisions,
            deltamode=deltamode,
        )

    def addgroup(self, deltas, linkmapper, transaction, addrevisioncb=None):
        return self._revlog.addgroup(
            deltas, linkmapper, transaction, addrevisioncb=addrevisioncb
        )

    def rawsize(self, rev):
        return self._revlog.rawsize(rev)

    def getstrippoint(self, minlink):
        return self._revlog.getstrippoint(minlink)

    def strip(self, minlink, transaction):
        return self._revlog.strip(minlink, transaction)

    def files(self):
        return self._revlog.files()

    def clone(self, tr, destrevlog, **kwargs):
        if not isinstance(destrevlog, manifestrevlog):
            raise error.ProgrammingError(b'expected manifestrevlog to clone()')

        return self._revlog.clone(tr, destrevlog._revlog, **kwargs)

    def storageinfo(
        self,
        exclusivefiles=False,
        sharedfiles=False,
        revisionscount=False,
        trackedsize=False,
        storedsize=False,
    ):
        return self._revlog.storageinfo(
            exclusivefiles=exclusivefiles,
            sharedfiles=sharedfiles,
            revisionscount=revisionscount,
            trackedsize=trackedsize,
            storedsize=storedsize,
        )

    @property
    def indexfile(self):
        return self._revlog.indexfile

    @indexfile.setter
    def indexfile(self, value):
        self._revlog.indexfile = value

    @property
    def opener(self):
        return self._revlog.opener

    @opener.setter
    def opener(self, value):
        self._revlog.opener = value


@interfaceutil.implementer(repository.imanifestlog)
class manifestlog(object):
    """A collection class representing the collection of manifest snapshots
    referenced by commits in the repository.

    In this situation, 'manifest' refers to the abstract concept of a snapshot
    of the list of files in the given commit. Consumers of the output of this
    class do not care about the implementation details of the actual manifests
    they receive (i.e. tree or flat or lazily loaded, etc)."""

    def __init__(self, opener, repo, rootstore, narrowmatch):
        usetreemanifest = False
        cachesize = 4

        opts = getattr(opener, 'options', None)
        if opts is not None:
            usetreemanifest = opts.get(b'treemanifest', usetreemanifest)
            cachesize = opts.get(b'manifestcachesize', cachesize)

        self._treemanifests = usetreemanifest

        self._rootstore = rootstore
        self._rootstore._setupmanifestcachehooks(repo)
        self._narrowmatch = narrowmatch

        # A cache of the manifestctx or treemanifestctx for each directory
        self._dirmancache = {}
        self._dirmancache[b''] = util.lrucachedict(cachesize)

        self._cachesize = cachesize

    def __getitem__(self, node):
        """Retrieves the manifest instance for the given node. Throws a
        LookupError if not found.
        """
        return self.get(b'', node)

    def get(self, tree, node, verify=True):
        """Retrieves the manifest instance for the given node. Throws a
        LookupError if not found.

        `verify` - if True an exception will be thrown if the node is not in
        the revlog
        """
        if node in self._dirmancache.get(tree, ()):
            return self._dirmancache[tree][node]

        if not self._narrowmatch.always():
            if not self._narrowmatch.visitdir(tree[:-1]):
                return excludeddirmanifestctx(tree, node)
        if tree:
            if self._rootstore._treeondisk:
                if verify:
                    # Side-effect is LookupError is raised if node doesn't
                    # exist.
                    self.getstorage(tree).rev(node)

                m = treemanifestctx(self, tree, node)
            else:
                raise error.Abort(
                    _(
                        b"cannot ask for manifest directory '%s' in a flat "
                        b"manifest"
                    )
                    % tree
                )
        else:
            if verify:
                # Side-effect is LookupError is raised if node doesn't exist.
                self._rootstore.rev(node)

            if self._treemanifests:
                m = treemanifestctx(self, b'', node)
            else:
                m = manifestctx(self, node)

        if node != nullid:
            mancache = self._dirmancache.get(tree)
            if not mancache:
                mancache = util.lrucachedict(self._cachesize)
                self._dirmancache[tree] = mancache
            mancache[node] = m
        return m

    def getstorage(self, tree):
        return self._rootstore.dirlog(tree)

    def clearcaches(self, clear_persisted_data=False):
        self._dirmancache.clear()
        self._rootstore.clearcaches(clear_persisted_data=clear_persisted_data)

    def rev(self, node):
        return self._rootstore.rev(node)


@interfaceutil.implementer(repository.imanifestrevisionwritable)
class memmanifestctx(object):
    def __init__(self, manifestlog):
        self._manifestlog = manifestlog
        self._manifestdict = manifestdict()

    def _storage(self):
        return self._manifestlog.getstorage(b'')

    def new(self):
        return memmanifestctx(self._manifestlog)

    def copy(self):
        memmf = memmanifestctx(self._manifestlog)
        memmf._manifestdict = self.read().copy()
        return memmf

    def read(self):
        return self._manifestdict

    def write(self, transaction, link, p1, p2, added, removed, match=None):
        return self._storage().add(
            self._manifestdict,
            transaction,
            link,
            p1,
            p2,
            added,
            removed,
            match=match,
        )


@interfaceutil.implementer(repository.imanifestrevisionstored)
class manifestctx(object):
    """A class representing a single revision of a manifest, including its
    contents, its parent revs, and its linkrev.
    """

    def __init__(self, manifestlog, node):
        self._manifestlog = manifestlog
        self._data = None

        self._node = node

        # TODO: We eventually want p1, p2, and linkrev exposed on this class,
        # but let's add it later when something needs it and we can load it
        # lazily.
        # self.p1, self.p2 = store.parents(node)
        # rev = store.rev(node)
        # self.linkrev = store.linkrev(rev)

    def _storage(self):
        return self._manifestlog.getstorage(b'')

    def node(self):
        return self._node

    def new(self):
        return memmanifestctx(self._manifestlog)

    def copy(self):
        memmf = memmanifestctx(self._manifestlog)
        memmf._manifestdict = self.read().copy()
        return memmf

    @propertycache
    def parents(self):
        return self._storage().parents(self._node)

    def read(self):
        if self._data is None:
            if self._node == nullid:
                self._data = manifestdict()
            else:
                store = self._storage()
                if self._node in store.fulltextcache:
                    text = pycompat.bytestr(store.fulltextcache[self._node])
                else:
                    text = store.revision(self._node)
                    arraytext = bytearray(text)
                    store.fulltextcache[self._node] = arraytext
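                    # Cache a mutable bytearray copy so later writes can use
                    # it as a delta base (see fastdelta() in add() above).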
                self._data = manifestdict(text)
        return self._data

    def readfast(self, shallow=False):
        '''Calls either readdelta or read, based on which would be less work.
        readdelta is called if the delta is against p1, and therefore can be
        read quickly.

        If `shallow` is True, nothing changes since this is a flat manifest.
        '''
        store = self._storage()
        r = store.rev(self._node)
        deltaparent = store.deltaparent(r)
        if deltaparent != nullrev and deltaparent in store.parentrevs(r):
            return self.readdelta()
        return self.read()

    def readdelta(self, shallow=False):
        '''Returns a manifest containing just the entries that are present
        in this manifest, but not in its p1 manifest. This is efficient to
        read if the revlog delta is already against p1.

        Changing the value of `shallow` has no effect on flat manifests.
        '''
        store = self._storage()
        r = store.rev(self._node)
        d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
        return manifestdict(d)

    def find(self, key):
        return self.read().find(key)


@interfaceutil.implementer(repository.imanifestrevisionwritable)
class memtreemanifestctx(object):
    def __init__(self, manifestlog, dir=b''):
        self._manifestlog = manifestlog
        self._dir = dir
        self._treemanifest = treemanifest()

    def _storage(self):
        return self._manifestlog.getstorage(b'')

    def new(self, dir=b''):
        return memtreemanifestctx(self._manifestlog, dir=dir)

    def copy(self):
        memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
        memmf._treemanifest = self._treemanifest.copy()
        return memmf

    def read(self):
        return self._treemanifest

    def write(self, transaction, link, p1, p2, added, removed, match=None):
        def readtree(dir, node):
            return self._manifestlog.get(dir, node).read()

        return self._storage().add(
            self._treemanifest,
            transaction,
            link,
            p1,
            p2,
            added,
            removed,
            readtree=readtree,
            match=match,
        )


@interfaceutil.implementer(repository.imanifestrevisionstored)
class treemanifestctx(object):
    def __init__(self, manifestlog, dir, node):
        self._manifestlog = manifestlog
        self._dir = dir
        self._data = None

        self._node = node

        # TODO: Load p1/p2/linkrev lazily. They need to be lazily loaded so
        # that we can instantiate treemanifestctx objects for directories we
        # don't have on disk.
        # self.p1, self.p2 = store.parents(node)
        # rev = store.rev(node)
        # self.linkrev = store.linkrev(rev)

    def _storage(self):
        narrowmatch = self._manifestlog._narrowmatch
        if not narrowmatch.always():
            if not narrowmatch.visitdir(self._dir[:-1]):
                return excludedmanifestrevlog(self._dir)
        return self._manifestlog.getstorage(self._dir)

    def read(self):
        if self._data is None:
            store = self._storage()
            if self._node == nullid:
                self._data = treemanifest()
            # TODO accessing non-public API
            elif store._treeondisk:
                m = treemanifest(dir=self._dir)

                def gettext():
                    return store.revision(self._node)

                def readsubtree(dir, subm):
                    # Set verify to False since we need to be able to create
                    # subtrees for trees that don't exist on disk.
                    return self._manifestlog.get(dir, subm, verify=False).read()

                m.read(gettext, readsubtree)
                m.setnode(self._node)
                self._data = m
            else:
                if self._node in store.fulltextcache:
                    text = pycompat.bytestr(store.fulltextcache[self._node])
                else:
                    text = store.revision(self._node)
                    arraytext = bytearray(text)
                    store.fulltextcache[self._node] = arraytext
                self._data = treemanifest(dir=self._dir, text=text)

        return self._data

    def node(self):
        return self._node

    def new(self, dir=b''):
        return memtreemanifestctx(self._manifestlog, dir=dir)

    def copy(self):
        memmf = memtreemanifestctx(self._manifestlog, dir=self._dir)
        memmf._treemanifest = self.read().copy()
        return memmf

    @propertycache
    def parents(self):
        return self._storage().parents(self._node)

    def readdelta(self, shallow=False):
        '''Returns a manifest containing just the entries that are present
        in this manifest, but not in its p1 manifest. This is efficient to
        read if the revlog delta is already against p1.

        If `shallow` is True, this will read the delta for this directory,
        without recursively reading subdirectory manifests. Instead, any
        subdirectory entry will be reported as it appears in the manifest,
        i.e. the subdirectory will be reported among files and distinguished
        only by its 't' flag.
        '''
        store = self._storage()
        if shallow:
            r = store.rev(self._node)
            d = mdiff.patchtext(store.revdiff(store.deltaparent(r), r))
            return manifestdict(d)
        else:
            # Need to perform a slow delta
            r0 = store.deltaparent(store.rev(self._node))
            m0 = self._manifestlog.get(self._dir, store.node(r0)).read()
            m1 = self.read()
            md = treemanifest(dir=self._dir)
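            # diff() maps each changed path to a ((n0, fl0), (n1, fl1)) pair;
            # entries with n1 set exist in this manifest and belong in `md`.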
            for f, ((n0, fl0), (n1, fl1)) in pycompat.iteritems(m0.diff(m1)):
                if n1:
                    md[f] = n1
                    if fl1:
                        md.setflag(f, fl1)
            return md

    def readfast(self, shallow=False):
        '''Calls either readdelta or read, based on which would be less work.
        readdelta is called if the delta is against p1, and therefore can be
        read quickly.

        If `shallow` is True, it only returns the entries from this manifest,
        and not any submanifests.
        '''
        store = self._storage()
        r = store.rev(self._node)
        deltaparent = store.deltaparent(r)
        if deltaparent != nullrev and deltaparent in store.parentrevs(r):
            return self.readdelta(shallow=shallow)

        if shallow:
            return manifestdict(store.revision(self._node))
        else:
            return self.read()

    def find(self, key):
        return self.read().find(key)


class excludeddir(treemanifest):
    """Stand-in for a directory that is excluded from the repository.

    With narrowing active on a repository that uses treemanifests,
    some of the directory revlogs will be excluded from the resulting
    clone. This is a huge storage win for clients, but means we need
    some sort of pseudo-manifest to surface to internals so we can
    detect a merge conflict outside the narrowspec. That's what this
    class is: it stands in for a directory whose node is known, but
    whose contents are unknown.
    """

    def __init__(self, dir, node):
        super(excludeddir, self).__init__(dir)
        self._node = node
        # Add an empty file, which will be included by iterators and such,
        # appearing as the directory itself (i.e. something like "dir/")
        self._files[b''] = node
        self._flags[b''] = b't'

    # Manifests outside the narrowspec should never be modified, so avoid
    # copying. This makes a noticeable difference when there are very many
    # directories outside the narrowspec. Also, it makes sense for the copy
    # to be of the same type as the original, which would not happen with
    # the super type's copy().
    def copy(self):
        return self


class excludeddirmanifestctx(treemanifestctx):
    """context wrapper for excludeddir - see that docstring for rationale"""

    def __init__(self, dir, node):
        self._dir = dir
        self._node = node

    def read(self):
        return excludeddir(self._dir, self._node)

    def write(self, *args):
        raise error.ProgrammingError(
            b'attempt to write manifest from excluded dir %s' % self._dir
        )


class excludedmanifestrevlog(manifestrevlog):
    """Stand-in for excluded treemanifest revlogs.

    When narrowing is active on a treemanifest repository, we'll have
    references to directories we can't see due to the revlog being
    skipped. This class exists to conform to the manifestrevlog
    interface for those directories and proactively prevent writes
    outside the narrowspec.
    """

    def __init__(self, dir):
        self._dir = dir

    def __len__(self):
        raise error.ProgrammingError(
            b'attempt to get length of excluded dir %s' % self._dir
        )

    def rev(self, node):
        raise error.ProgrammingError(
            b'attempt to get rev from excluded dir %s' % self._dir
        )

    def linkrev(self, node):
        raise error.ProgrammingError(
            b'attempt to get linkrev from excluded dir %s' % self._dir
        )

    def node(self, rev):
        raise error.ProgrammingError(
            b'attempt to get node from excluded dir %s' % self._dir
        )

    def add(self, *args, **kwargs):
        # We should never write entries in dirlogs outside the narrow clone.
        # However, the method still gets called from writesubtree() in
        # _addtree(), so we need to handle it. We should possibly make that
        # avoid calling add() with a clean manifest (_dirty is always False
        # in excludeddir instances).
        pass

@@ -1,3725 +1,3723 @@

#!/usr/bin/env python
#
# run-tests.py - Run a set of tests on Mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# Modifying this script is tricky because it has many modes:
#   - serial (default) vs parallel (-jN, N > 1)
#   - no coverage (default) vs coverage (-c, -C, -s)
#   - temp install (default) vs specific hg script (--with-hg, --local)
#   - tests are a mix of shell scripts and Python scripts
#
# If you change this script, it is recommended that you ensure you
# haven't broken it by running it in various modes with a representative
# sample of test scripts. For example:
#
#  1) serial, no coverage, temp install:
#      ./run-tests.py test-s*
#  2) serial, no coverage, local hg:
#      ./run-tests.py --local test-s*
#  3) serial, coverage, temp install:
#      ./run-tests.py -c test-s*
#  4) serial, coverage, local hg:
#      ./run-tests.py -c --local test-s*      # unsupported
#  5) parallel, no coverage, temp install:
#      ./run-tests.py -j2 test-s*
#  6) parallel, no coverage, local hg:
#      ./run-tests.py -j2 --local test-s*
#  7) parallel, coverage, temp install:
#      ./run-tests.py -j2 -c test-s*          # currently broken
#  8) parallel, coverage, local install:
#      ./run-tests.py -j2 -c --local test-s*  # unsupported (and broken)
#  9) parallel, custom tmp dir:
#      ./run-tests.py -j2 --tmpdir /tmp/myhgtests
#  10) parallel, pure, tests that call run-tests:
#      ./run-tests.py --pure `grep -l run-tests.py *.t`
#
# (You could use any subset of the tests: test-s* happens to match
# enough that it's worth doing parallel runs, few enough that it
# completes fairly quickly, includes both shell and Python scripts, and
# includes some scripts that run daemon processes.)

from __future__ import absolute_import, print_function

import argparse
import collections
import difflib
import distutils.version as version
import errno
import json
import multiprocessing
import os
import random
import re
import shutil
import signal
import socket
import subprocess
import sys
import sysconfig
import tempfile
import threading
import time
import unittest
import uuid
import xml.dom.minidom as minidom

try:
    import Queue as queue
except ImportError:
    import queue

try:
    import shlex

    shellquote = shlex.quote
except (ImportError, AttributeError):
    import pipes

    shellquote = pipes.quote

processlock = threading.Lock()

pygmentspresent = False
# ANSI color is unsupported prior to Windows 10
if os.name != 'nt':
    try:  # is pygments installed
        import pygments
        import pygments.lexers as lexers
        import pygments.lexer as lexer
        import pygments.formatters as formatters
        import pygments.token as token
        import pygments.style as style

        pygmentspresent = True
        difflexer = lexers.DiffLexer()
        terminal256formatter = formatters.Terminal256Formatter()
    except ImportError:
        pass

if pygmentspresent:

    class TestRunnerStyle(style.Style):
        default_style = ""
        skipped = token.string_to_tokentype("Token.Generic.Skipped")
        failed = token.string_to_tokentype("Token.Generic.Failed")
        skippedname = token.string_to_tokentype("Token.Generic.SName")
        failedname = token.string_to_tokentype("Token.Generic.FName")
        styles = {
            skipped: '#e5e5e5',
            skippedname: '#00ffff',
            failed: '#7f0000',
            failedname: '#ff0000',
        }

    class TestRunnerLexer(lexer.RegexLexer):
        testpattern = r'[\w-]+\.(t|py)(#[a-zA-Z0-9_\-\.]+)?'
        tokens = {
            'root': [
                (r'^Skipped', token.Generic.Skipped, 'skipped'),
                (r'^Failed ', token.Generic.Failed, 'failed'),
                (r'^ERROR: ', token.Generic.Failed, 'failed'),
            ],
            'skipped': [
                (testpattern, token.Generic.SName),
                (r':.*', token.Generic.Skipped),
            ],
            'failed': [
                (testpattern, token.Generic.FName),
                (r'(:| ).*', token.Generic.Failed),
            ],
        }

    runnerformatter = formatters.Terminal256Formatter(style=TestRunnerStyle)
    runnerlexer = TestRunnerLexer()
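    # For example (illustrative; the exact escape codes depend on the
    # terminal):
    #
    #   >>> pygments.highlight('Failed test-commit.t: output changed\n',
    #   ...                    runnerlexer, runnerformatter)
    #   ... the same line, with the test name rendered in the FName style ...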

origenviron = os.environ.copy()

if sys.version_info > (3, 5, 0):
    PYTHON3 = True
    xrange = range  # we use xrange in one place, and we'd rather not use range

    def _bytespath(p):
        if p is None:
            return p
        return p.encode('utf-8')

    def _strpath(p):
        if p is None:
            return p
        return p.decode('utf-8')

    osenvironb = getattr(os, 'environb', None)
    if osenvironb is None:
        # Windows lacks os.environb, for instance. A proxy over the real thing
        # instead of a copy allows the environment to be updated via bytes on
        # all platforms.
        class environbytes(object):
            def __init__(self, strenv):
                self.__len__ = strenv.__len__
                self.clear = strenv.clear
                self._strenv = strenv

            def __getitem__(self, k):
                v = self._strenv.__getitem__(_strpath(k))
                return _bytespath(v)

            def __setitem__(self, k, v):
                self._strenv.__setitem__(_strpath(k), _strpath(v))

            def __delitem__(self, k):
                self._strenv.__delitem__(_strpath(k))

            def __contains__(self, k):
                return self._strenv.__contains__(_strpath(k))

            def __iter__(self):
                return iter([_bytespath(k) for k in iter(self._strenv)])

            def get(self, k, default=None):
                v = self._strenv.get(_strpath(k), _strpath(default))
                return _bytespath(v)

            def pop(self, k, default=None):
                v = self._strenv.pop(_strpath(k), _strpath(default))
                return _bytespath(v)

        osenvironb = environbytes(os.environ)
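        # Sketch of the proxy in action (only on platforms that took this
        # fallback; key and value are hypothetical):
        #
        #   >>> osenvironb[b'HGTEST_DEMO'] = b'1'  # stored as str underneath
        #   >>> osenvironb[b'HGTEST_DEMO']
        #   b'1'
        #   >>> os.environ['HGTEST_DEMO']
        #   '1'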

    getcwdb = getattr(os, 'getcwdb')
    if not getcwdb or os.name == 'nt':
        getcwdb = lambda: _bytespath(os.getcwd())

elif sys.version_info >= (3, 0, 0):
    print(
        '%s is only supported on Python 3.5+ and 2.7, not %s'
        % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))
    )
    sys.exit(70)  # EX_SOFTWARE from `man 3 sysexits`
else:
    PYTHON3 = False

    # In python 2.x, path operations are generally done using
    # bytestrings by default, so we don't have to do any extra
    # fiddling there. We define the wrapper functions anyway just to
    # help keep code consistent between platforms.
    def _bytespath(p):
        return p

    _strpath = _bytespath
    osenvironb = os.environ
    getcwdb = os.getcwd

# For Windows support
wifexited = getattr(os, "WIFEXITED", lambda x: False)

# Whether to use IPv6
def checksocketfamily(name, port=20058):
    """return true if we can listen on localhost using family=name

    name should be either 'AF_INET', or 'AF_INET6'.
    The port already being in use is okay - EADDRINUSE is considered
    successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        return False
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            return True
        elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        else:
            raise
    else:
        return False
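# For example (results depend on the host's network stack):
#
#   >>> checksocketfamily('AF_INET')   # can we bind an IPv4 socket?
#   True
#   >>> checksocketfamily('AF_INET6')  # False on an IPv4-only host
#   False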


# useipv6 will be set by parseargs
useipv6 = None


def checkportisavailable(port):
    """return true if a port seems free to bind on localhost"""
    if useipv6:
        family = socket.AF_INET6
    else:
        family = socket.AF_INET
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        s.bind(('localhost', port))
        s.close()
        return True
    except socket.error as exc:
        if exc.errno not in (
            errno.EADDRINUSE,
            errno.EADDRNOTAVAIL,
            errno.EPROTONOSUPPORT,
        ):
            raise
        return False
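# A crude free-port scan could be built on top of this helper (hypothetical
# usage; the harness itself hands each test a reserved port range instead):
#
#   >>> next(p for p in range(20000, 20100) if checkportisavailable(p))
#   20000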


closefds = os.name == 'posix'


def Popen4(cmd, wd, timeout, env=None):
    processlock.acquire()
    p = subprocess.Popen(
        _strpath(cmd),
        shell=True,
        bufsize=-1,
        cwd=_strpath(wd),
        env=env,
        close_fds=closefds,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    processlock.release()

    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:

        def t():
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(0.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)

        threading.Thread(target=t).start()

    return p
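# Illustrative use (hypothetical command; stderr is folded into stdout, so
# the caller reads one combined stream from p.fromchild):
#
#   >>> p = Popen4(b'echo hi', b'.', timeout=0)  # timeout=0: no watchdog
#   >>> p.fromchild.read()
#   b'hi\n'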


if sys.executable:
    sysexecutable = sys.executable
elif os.environ.get('PYTHONEXECUTABLE'):
    sysexecutable = os.environ['PYTHONEXECUTABLE']
elif os.environ.get('PYTHON'):
    sysexecutable = os.environ['PYTHON']
else:
    raise AssertionError('Could not find Python interpreter')

PYTHON = _bytespath(sysexecutable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
    IMPL_PATH = b'JYTHONPATH'

defaults = {
    'jobs': ('HGTEST_JOBS', multiprocessing.cpu_count()),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 1500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}
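# Every default above can be overridden from the environment before option
# parsing; getparser() below folds these into the argparse defaults, e.g.
# (illustrative shell invocation):
#
#   $ HGTEST_JOBS=4 HGTEST_TIMEOUT=360 ./run-tests.py test-commit.t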


def canonpath(path):
    return os.path.realpath(os.path.expanduser(path))


def parselistfiles(files, listtype, warn=True):
    entries = dict()
    for filename in files:
        try:
            path = os.path.expanduser(os.path.expandvars(filename))
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        for line in f.readlines():
            line = line.split(b'#', 1)[0].strip()
            if line:
                entries[line] = filename

        f.close()
    return entries
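# For example, a blacklist file containing (hypothetical contents):
#
#       # flaky on this platform
#       test-https.t
#
# yields {b'test-https.t': filename}: entries are keyed by test name and map
# back to the list file that named them; comments and blank lines are dropped.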


def parsettestcases(path):
    """read a .t test file, return the list of its "#testcases" groups

    Each group is the sorted list of case names from one "#testcases" line.
    If path does not exist, return an empty list.
    """
    cases = []
    try:
        with open(path, 'rb') as f:
            for l in f:
                if l.startswith(b'#testcases '):
                    cases.append(sorted(l[11:].split()))
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            raise
    return cases
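# For example, a .t file declaring (hypothetical directive):
#
#       #testcases flat tree
#
# yields [[b'flat', b'tree']] -- one sorted name list per #testcases line.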


def getparser():
    """Obtain the argument parser used by the CLI."""
    parser = argparse.ArgumentParser(usage='%(prog)s [options] [tests]')

    selection = parser.add_argument_group('Test Selection')
    selection.add_argument(
        '--allow-slow-tests',
        action='store_true',
        help='allow extremely slow tests',
    )
    selection.add_argument(
        "--blacklist",
        action="append",
        help="skip tests listed in the specified blacklist file",
    )
    selection.add_argument(
        "--changed",
        help="run tests that are changed in parent rev or working directory",
    )
    selection.add_argument(
        "-k", "--keywords", help="run tests matching keywords"
    )
    selection.add_argument(
        "-r", "--retest", action="store_true", help="retest failed tests"
    )
    selection.add_argument(
        "--test-list",
        action="append",
        help="read tests to run from the specified file",
    )
    selection.add_argument(
        "--whitelist",
        action="append",
        help="always run tests listed in the specified whitelist file",
    )
    selection.add_argument(
        'tests', metavar='TESTS', nargs='*', help='Tests to run'
    )

    harness = parser.add_argument_group('Test Harness Behavior')
    harness.add_argument(
        '--bisect-repo',
        metavar='bisect_repo',
        help=(
            "Path of a repo to bisect. Use together with " "--known-good-rev"
        ),
    )
    harness.add_argument(
        "-d",
        "--debug",
        action="store_true",
        help="debug mode: write output of test scripts to console"
        " rather than capturing and diffing it (disables timeout)",
    )
    harness.add_argument(
        "-f",
        "--first",
        action="store_true",
        help="exit on the first test failure",
    )
    harness.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="prompt to accept changed output",
    )
    harness.add_argument(
        "-j",
        "--jobs",
        type=int,
        help="number of jobs to run in parallel"
        " (default: $%s or %d)" % defaults['jobs'],
    )
    harness.add_argument(
        "--keep-tmpdir",
        action="store_true",
        help="keep temporary directory after running tests",
    )
    harness.add_argument(
        '--known-good-rev',
        metavar="known_good_rev",
        help=(
            "Automatically bisect any failures using this "
            "revision as a known-good revision."
        ),
    )
    harness.add_argument(
        "--list-tests",
        action="store_true",
        help="list tests instead of running them",
    )
    harness.add_argument(
        "--loop", action="store_true", help="loop tests repeatedly"
    )
    harness.add_argument(
        '--random', action="store_true", help='run tests in random order'
    )
    harness.add_argument(
        '--order-by-runtime',
        action="store_true",
        help='run slowest tests first, according to .testtimes',
    )
    harness.add_argument(
        "-p",
        "--port",
        type=int,
        help="port on which servers should listen"
        " (default: $%s or %d)" % defaults['port'],
    )
    harness.add_argument(
        '--profile-runner',
        action='store_true',
        help='run statprof on run-tests',
    )
    harness.add_argument(
        "-R", "--restart", action="store_true", help="restart at last error"
    )
    harness.add_argument(
        "--runs-per-test",
        type=int,
        dest="runs_per_test",
        help="run each test N times (default=1)",
        default=1,
    )
    harness.add_argument(
        "--shell", help="shell to use (default: $%s or %s)" % defaults['shell']
    )
    harness.add_argument(
        '--showchannels', action='store_true', help='show scheduling channels'
    )
    harness.add_argument(
        "--slowtimeout",
        type=int,
        help="kill errant slow tests after SLOWTIMEOUT seconds"
        " (default: $%s or %d)" % defaults['slowtimeout'],
    )
    harness.add_argument(
        "-t",
        "--timeout",
        type=int,
        help="kill errant tests after TIMEOUT seconds"
        " (default: $%s or %d)" % defaults['timeout'],
    )
    harness.add_argument(
        "--tmpdir",
        help="run tests in the given temporary directory"
        " (implies --keep-tmpdir)",
    )
    harness.add_argument(
        "-v", "--verbose", action="store_true", help="output verbose messages"
    )

    hgconf = parser.add_argument_group('Mercurial Configuration')
    hgconf.add_argument(
        "--chg",
        action="store_true",
        help="install and use chg wrapper in place of hg",
    )
    hgconf.add_argument("--compiler", help="compiler to build with")
    hgconf.add_argument(
        '--extra-config-opt',
        action="append",
        default=[],
        help='set the given config opt in the test hgrc',
    )
    hgconf.add_argument(
        "-l",
        "--local",
        action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
        "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set",
    )
    hgconf.add_argument(
        "--ipv6",
        action="store_true",
        help="prefer IPv6 to IPv4 for network related tests",
    )
    hgconf.add_argument(
        "--pure",
        action="store_true",
        help="use pure Python code instead of C extensions",
    )
    hgconf.add_argument(
        "-3",
        "--py3-warnings",
        action="store_true",
        help="enable Py3k warnings on Python 2.7+",
    )
    hgconf.add_argument(
        "--with-chg",
        metavar="CHG",
        help="use specified chg wrapper in place of hg",
    )
    hgconf.add_argument(
        "--with-hg",
        metavar="HG",
        help="test using specified hg script rather than a "
        "temporary installation",
    )

    reporting = parser.add_argument_group('Results Reporting')
    reporting.add_argument(
        "-C",
        "--annotate",
        action="store_true",
        help="output files annotated with coverage",
    )
    reporting.add_argument(
        "--color",
        choices=["always", "auto", "never"],
        default=os.environ.get('HGRUNTESTSCOLOR', 'auto'),
        help="colorisation: always|auto|never (default: auto)",
    )
    reporting.add_argument(
        "-c",
        "--cover",
        action="store_true",
        help="print a test coverage report",
    )
    reporting.add_argument(
        '--exceptions',
        action='store_true',
        help='log all exceptions and generate an exception report',
    )
    reporting.add_argument(
        "-H",
        "--htmlcov",
        action="store_true",
        help="create an HTML report of the coverage of the files",
    )
    reporting.add_argument(
        "--json",
        action="store_true",
        help="store test result data in 'report.json' file",
    )
    reporting.add_argument(
        "--outputdir",
        help="directory to write error logs to (default=test directory)",
    )
    reporting.add_argument(
        "-n", "--nodiff", action="store_true", help="skip showing test changes"
    )
    reporting.add_argument(
        "-S",
        "--noskips",
        action="store_true",
        help="don't report skipped tests verbosely",
    )
    reporting.add_argument(
        "--time", action="store_true", help="time how long each test takes"
    )
    reporting.add_argument("--view", help="external diff viewer")
    reporting.add_argument(
        "--xunit", help="record xunit results at specified path"
    )

    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser


def parseargs(args, parser):
    """Parse arguments with our argument parser and validate results."""
    options = parser.parse_args(args)

    # jython and pypy are always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if options.local:
        if options.with_hg or options.with_chg:
            parser.error('--local cannot be used with --with-hg or --with-chg')
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error(
                    '--local specified, but %r not found or '
                    'not executable' % binpath
                )
            setattr(options, attr, _strpath(binpath))

    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (
            os.path.isfile(options.with_hg)
            and os.access(options.with_hg, os.X_OK)
        ):
            parser.error('--with-hg must specify an executable hg script')
        if os.path.basename(options.with_hg) not in [b'hg', b'hg.exe']:
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
            sys.stderr.flush()

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False  # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (
            os.path.isfile(options.with_chg)
            and os.access(options.with_chg, os.X_OK)
        ):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error(
            '--chg does not work when --with-hg is specified '
            '(use --with-chg instead)'
        )

    if options.color == 'always' and not pygmentspresent:
        sys.stderr.write(
            'warning: --color=always ignored because '
            'pygments is not installed\n'
        )

    if options.bisect_repo and not options.known_good_rev:
        parser.error("--bisect-repo cannot be used without --known-good-rev")

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = (not checksocketfamily('AF_INET')) and checksocketfamily(
            'AF_INET6'
        )

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage

            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error(
            "sorry, coverage options do not work when --local " "is specified"
        )

    if options.anycoverage and options.with_hg:
        parser.error(
            "sorry, coverage options do not work when --with-hg " "is specified"
        )

    global verbose
    if options.verbose:
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        if options.timeout != defaults['timeout']:
            sys.stderr.write('warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n'
            )
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3_warnings:
        if PYTHON3:
            parser.error('--py3-warnings can only be used on Python 2.7')

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return options
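# Typical wiring (hypothetical argv):
#
#   >>> parser = getparser()
#   >>> options = parseargs(['-j2', 'test-commit.t'], parser)
#   >>> options.jobs
#   2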


def rename(src, dst):
    """Like os.rename(), but trades atomicity and open-file friendliness
    for support of an existing destination.
    """
    shutil.copy(src, dst)
    os.remove(src)
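# Unlike os.rename(), the copy-then-remove above tolerates a pre-existing
# dst (notably on Windows), but a crash between the two steps can leave both
# files behind. Sketch (hypothetical paths):
#
#   >>> rename('output.tmp', 'output')  # overwrites 'output' if present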


def makecleanable(path):
    """Try to fix directory permissions recursively so that the entire tree
    can be deleted"""
    for dirpath, dirnames, _filenames in os.walk(path, topdown=True):
        for d in dirnames:
            p = os.path.join(dirpath, d)
            try:
                os.chmod(p, os.stat(p).st_mode & 0o777 | 0o700)  # chmod u+rwx
            except OSError:
                pass


_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools

    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)


def getdiff(expected, output, ref, err):
    servefail = False
    lines = []
    for line in _unified_diff(expected, output, ref, err):
        if line.startswith(b'+++') or line.startswith(b'---'):
            line = line.replace(b'\\', b'/')
            if line.endswith(b' \n'):
                line = line[:-2] + b'\n'
        lines.append(line)
        if not servefail and line.startswith(
            b'+ abort: child process failed to start'
        ):
            servefail = True

    return servefail, lines
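# For example (hypothetical reference and observed output):
#
#   >>> exp = [b'  $ hg status\n']
#   >>> got = [b'  $ hg status\n', b'  M a\n']
#   >>> servefail, lines = getdiff(exp, got, b'test-st.t', b'test-st.t.err')
#   >>> servefail              # no "child process failed to start" marker
#   False
#   >>> b''.join(lines)        # a unified diff labeled with the ref/err names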


verbose = False


def vlog(*msg):
    """Log only when in verbose mode."""
    if verbose is False:
        return

    return log(*msg)


# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines of the form below, capturing the
# feature list in group 2, and the preceding line output in group 1:
#
#   output..output (feature !)\n
optline = re.compile(br'(.*) \((.+?) !\)\n$')
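# For example (hypothetical conditional output line):
#
#   >>> optline.match(b'  listening at http://localhost/ (windows !)\n').group(2)
#   b'windows'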


def cdatasafe(data):
    """Make a string safe to include in a CDATA block.

    Certain control characters are illegal in a CDATA block, and
    there's no way to include a ]]> in a CDATA either. This function
    replaces illegal bytes with ? and adds a space between the ]] so
    that it won't break the CDATA block.
    """
    return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>')
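# For example:
#
#   >>> cdatasafe(b'ok\x07maybe ]]> done')
#   b'ok?maybe ] ]> done'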


def log(*msg):
    """Log something to stdout.

    Arguments are strings to print.
    """
    with iolock:
        if verbose:
            print(verbose, end=' ')
        for m in msg:
            print(m, end=' ')
        print()
        sys.stdout.flush()


def highlightdiff(line, color):
    if not color:
        return line
    assert pygmentspresent
    return pygments.highlight(
        line.decode('latin1'), difflexer, terminal256formatter
    ).encode('latin1')


def highlightmsg(msg, color):
    if not color:
        return msg
    assert pygmentspresent
    return pygments.highlight(msg, runnerlexer, runnerformatter)


def terminate(proc):
    """Terminate subprocess"""
    vlog('# Terminating process %d' % proc.pid)
    try:
        proc.terminate()
    except OSError:
        pass


def killdaemons(pidfile):
    import killdaemons as killmod

    return killmod.killdaemons(pidfile, tryhard=False, remove=True, logfn=vlog)
890 class Test(unittest.TestCase):
890 class Test(unittest.TestCase):
891 """Encapsulates a single, runnable test.
891 """Encapsulates a single, runnable test.
892
892
893 While this class conforms to the unittest.TestCase API, it differs in that
893 While this class conforms to the unittest.TestCase API, it differs in that
894 instances need to be instantiated manually. (Typically, unittest.TestCase
894 instances need to be instantiated manually. (Typically, unittest.TestCase
895 classes are instantiated automatically by scanning modules.)
895 classes are instantiated automatically by scanning modules.)
896 """
896 """
897
897
898 # Status code reserved for skipped tests (used by hghave).
898 # Status code reserved for skipped tests (used by hghave).
899 SKIPPED_STATUS = 80
899 SKIPPED_STATUS = 80
900
900
901 def __init__(
901 def __init__(
902 self,
902 self,
903 path,
903 path,
904 outputdir,
904 outputdir,
905 tmpdir,
905 tmpdir,
906 keeptmpdir=False,
906 keeptmpdir=False,
907 debug=False,
907 debug=False,
908 first=False,
908 first=False,
909 timeout=None,
909 timeout=None,
910 startport=None,
910 startport=None,
911 extraconfigopts=None,
911 extraconfigopts=None,
912 py3warnings=False,
912 py3warnings=False,
913 shell=None,
913 shell=None,
914 hgcommand=None,
914 hgcommand=None,
915 slowtimeout=None,
915 slowtimeout=None,
916 usechg=False,
916 usechg=False,
917 useipv6=False,
917 useipv6=False,
918 ):
918 ):
919 """Create a test from parameters.
919 """Create a test from parameters.
920
920
921 path is the full path to the file defining the test.
921 path is the full path to the file defining the test.
922
922
923 tmpdir is the main temporary directory to use for this test.
923 tmpdir is the main temporary directory to use for this test.
924
924
925 keeptmpdir determines whether to keep the test's temporary directory
925 keeptmpdir determines whether to keep the test's temporary directory
926 after execution. It defaults to removal (False).
926 after execution. It defaults to removal (False).
927
927
928 debug mode will make the test execute verbosely, with unfiltered
928 debug mode will make the test execute verbosely, with unfiltered
929 output.
929 output.
930
930
931 timeout controls the maximum run time of the test. It is ignored when
931 timeout controls the maximum run time of the test. It is ignored when
932 debug is True. See slowtimeout for tests with #require slow.
932 debug is True. See slowtimeout for tests with #require slow.
933
933
934 slowtimeout overrides timeout if the test has #require slow.
934 slowtimeout overrides timeout if the test has #require slow.
935
935
936 startport controls the starting port number to use for this test. Each
936 startport controls the starting port number to use for this test. Each
937 test will reserve 3 port numbers for execution. It is the caller's
937 test will reserve 3 port numbers for execution. It is the caller's
938 responsibility to allocate a non-overlapping port range to Test
938 responsibility to allocate a non-overlapping port range to Test
939 instances.
939 instances.
940
940
941 extraconfigopts is an iterable of extra hgrc config options. Values
941 extraconfigopts is an iterable of extra hgrc config options. Values
942 must have the form "key=value" (something understood by hgrc). Values
942 must have the form "key=value" (something understood by hgrc). Values
943 of the form "foo.key=value" will result in "[foo] key=value".
943 of the form "foo.key=value" will result in "[foo] key=value".
944
944
945 py3warnings enables Py3k warnings.
945 py3warnings enables Py3k warnings.
946
946
947 shell is the shell to execute tests in.
947 shell is the shell to execute tests in.
948 """
948 """
949 if timeout is None:
949 if timeout is None:
950 timeout = defaults['timeout']
950 timeout = defaults['timeout']
951 if startport is None:
951 if startport is None:
952 startport = defaults['port']
952 startport = defaults['port']
953 if slowtimeout is None:
953 if slowtimeout is None:
954 slowtimeout = defaults['slowtimeout']
954 slowtimeout = defaults['slowtimeout']
955 self.path = path
955 self.path = path
956 self.bname = os.path.basename(path)
956 self.bname = os.path.basename(path)
957 self.name = _strpath(self.bname)
957 self.name = _strpath(self.bname)
958 self._testdir = os.path.dirname(path)
958 self._testdir = os.path.dirname(path)
959 self._outputdir = outputdir
959 self._outputdir = outputdir
960 self._tmpname = os.path.basename(path)
960 self._tmpname = os.path.basename(path)
961 self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
961 self.errpath = os.path.join(self._outputdir, b'%s.err' % self.bname)
962
962
963 self._threadtmp = tmpdir
963 self._threadtmp = tmpdir
964 self._keeptmpdir = keeptmpdir
964 self._keeptmpdir = keeptmpdir
965 self._debug = debug
965 self._debug = debug
966 self._first = first
966 self._first = first
967 self._timeout = timeout
967 self._timeout = timeout
968 self._slowtimeout = slowtimeout
968 self._slowtimeout = slowtimeout
969 self._startport = startport
969 self._startport = startport
970 self._extraconfigopts = extraconfigopts or []
970 self._extraconfigopts = extraconfigopts or []
971 self._py3warnings = py3warnings
971 self._py3warnings = py3warnings
972 self._shell = _bytespath(shell)
972 self._shell = _bytespath(shell)
973 self._hgcommand = hgcommand or b'hg'
973 self._hgcommand = hgcommand or b'hg'
974 self._usechg = usechg
974 self._usechg = usechg
975 self._useipv6 = useipv6
975 self._useipv6 = useipv6
976
976
977 self._aborted = False
977 self._aborted = False
978 self._daemonpids = []
978 self._daemonpids = []
979 self._finished = None
979 self._finished = None
980 self._ret = None
980 self._ret = None
981 self._out = None
981 self._out = None
982 self._skipped = None
982 self._skipped = None
983 self._testtmp = None
983 self._testtmp = None
984 self._chgsockdir = None
984 self._chgsockdir = None
985
985
986 self._refout = self.readrefout()
986 self._refout = self.readrefout()
987
987
988 def readrefout(self):
988 def readrefout(self):
989 """read reference output"""
989 """read reference output"""
990 # If we're not in --debug mode and reference output file exists,
990 # If we're not in --debug mode and reference output file exists,
991 # check test output against it.
991 # check test output against it.
        if self._debug:
            return None  # to match "out is None"
        elif os.path.exists(self.refpath):
            with open(self.refpath, 'rb') as f:
                return f.read().splitlines(True)
        else:
            return []

    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        return self.name

    def __str__(self):
        return self.name

    def shortDescription(self):
        return self.name

    def setUp(self):
        """Tasks to perform before run()."""
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

        name = self._tmpname
        self._testtmp = os.path.join(self._threadtmp, name)
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

        if self._usechg:
            self._chgsockdir = os.path.join(
                self._threadtmp, b'%s.chgsock' % name
            )
            os.mkdir(self._chgsockdir)

    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except unittest.SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from the count of tests run.
                result.testsRun -= 1
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)

    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._genrestoreenv(env)
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None:  # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hghave failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise unittest.SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            self.fail('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (
                (ret != 0 or out != self._refout)
                and not self._skipped
                and not self._debug
            ):
                with open(self.errpath, 'wb') as f:
                    for line in out:
                        f.write(line)

            # The result object handles diff calculation for us.
            with firstlock:
                if self._result.addOutputMismatch(self, ret, out, self._refout):
                    # change was accepted, skip failing
                    return
                if self._first:
                    global firsterror
                    firsterror = True

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))

    def tearDown(self):
        """Tasks to perform after run()."""
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log(
                '\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s'
                % (
                    self._testtmp.decode('utf-8'),
                    self._threadtmp.decode('utf-8'),
                )
            )
        else:
            try:
                shutil.rmtree(self._testtmp)
            except OSError:
                # unreadable directory may be left in $TESTTMP; fix permission
                # and try again
                makecleanable(self._testtmp)
                shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        if (
            (self._ret != 0 or self._out != self._refout)
            and not self._skipped
            and not self._debug
            and self._out
        ):
            with open(self.errpath, 'wb') as f:
                for line in self._out:
                    f.write(line)

        vlog("# Ret was:", self._ret, '(%s)' % self.name)

    def _run(self, env):
        # This should be implemented in child classes to run tests.
        raise unittest.SkipTest('unknown test type')

    def abort(self):
        """Terminate execution of this test."""
        self._aborted = True

    def _portmap(self, i):
        offset = b'' if i == 0 else b'%d' % i
        return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
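
    # For example (illustrative start port): with self._startport == 20059,
    #   _portmap(0) -> (br':20059\b', b':$HGPORT')
    #   _portmap(1) -> (br':20060\b', b':$HGPORT1')
    # so literal port numbers in test output are rewritten to the stable
    # $HGPORTn placeholders before comparison with expected output.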

    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        replacementfile = os.path.join(self._testdir, b'common-pattern.py')

        if os.path.exists(replacementfile):
            data = {}
            with open(replacementfile, mode='rb') as source:
                # the intermediate 'compile' step helps with debugging
                code = compile(source.read(), replacementfile, 'exec')
                exec(code, data)
            for value in data.get('substitutions', ()):
                if len(value) != 2:
                    msg = 'malformed substitution in %s: %r'
                    msg %= (replacementfile, value)
                    raise ValueError(msg)
                r.append(value)
        return r
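
    # Judging from the loader above, a common-pattern.py file defines a
    # module-level ``substitutions`` list of (regex, replacement) pairs
    # applied to all test output; a hypothetical entry might look like:
    #   substitutions = [
    #       (br'port [0-9]+', br'port $PORT$'),
    #   ]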

    def _escapepath(self, p):
        if os.name == 'nt':
            return b''.join(
                c.isalpha()
                and b'[%s%s]' % (c.lower(), c.upper())
                or c in b'/\\'
                and br'[/\\]'
                or c.isdigit()
                and c
                or b'\\' + c
                for c in [p[i : i + 1] for i in range(len(p))]
            )
        else:
            return re.escape(p)
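
    # Sketch of the Windows branch above (illustrative input): b'c:\\Temp1'
    # becomes br'[cC]\:[/\\][tT][eE][mM][pP]1' -- letters match either case,
    # path separators match both '/' and '\', digits pass through, and
    # everything else is backslash-escaped.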

    def _localip(self):
        if self._useipv6:
            return b'::1'
        else:
            return b'127.0.0.1'

    def _genrestoreenv(self, testenv):
        """Generate a script that can be used by tests to restore the original
        environment."""
        # Put the restoreenv script inside self._threadtmp
        scriptpath = os.path.join(self._threadtmp, b'restoreenv.sh')
        testenv['HGTEST_RESTOREENV'] = _strpath(scriptpath)

        # Only restore environment variable names that the shell allows
        # us to export.
        name_regex = re.compile('^[a-zA-Z][a-zA-Z0-9_]*$')

        # Do not restore these variables; otherwise tests would fail.
        reqnames = {'PYTHON', 'TESTDIR', 'TESTTMP'}

        with open(scriptpath, 'w') as envf:
            for name, value in origenviron.items():
                if not name_regex.match(name):
                    # Skip environment variables with unusual names not
                    # allowed by most shells.
                    continue
                if name in reqnames:
                    continue
                envf.write('%s=%s\n' % (name, shellquote(value)))

            for name in testenv:
                if name in origenviron or name in reqnames:
                    continue
                envf.write('unset %s\n' % (name,))
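
    # The generated restoreenv.sh is a plain shell fragment that a test can
    # source to roll the environment back; schematically (illustrative
    # content):
    #   PATH='/usr/bin:/bin'
    #   unset HGUSER
    # with one assignment per surviving original variable and one unset per
    # test-only variable.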

    def _getenv(self):
        """Obtain environment variables to use during test execution."""

        def defineport(i):
            offset = '' if i == 0 else '%s' % i
            env["HGPORT%s" % offset] = '%s' % (self._startport + i)

        env = os.environ.copy()
        env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase') or ''
        env['HGEMITWARNINGS'] = '1'
        env['TESTTMP'] = _strpath(self._testtmp)
        env['TESTNAME'] = self.name
        env['HOME'] = _strpath(self._testtmp)
        # This number should match portneeded in _getport
        for port in xrange(3):
            # This list should be parallel to _portmap in _getreplacements
            defineport(port)
        env["HGRCPATH"] = _strpath(os.path.join(self._threadtmp, b'.hgrc'))
        env["DAEMON_PIDS"] = _strpath(
            os.path.join(self._threadtmp, b'daemon.pids')
        )
        env["HGEDITOR"] = (
            '"' + sysexecutable + '"' + ' -c "import sys; sys.exit(0)"'
        )
        env["HGUSER"] = "test"
        env["HGENCODING"] = "ascii"
        env["HGENCODINGMODE"] = "strict"
        env["HGHOSTNAME"] = "test-hostname"
        env['HGIPV6'] = str(int(self._useipv6))
        # See contrib/catapipe.py for how to use this functionality.
        if 'HGTESTCATAPULTSERVERPIPE' not in env:
            # If we don't have HGTESTCATAPULTSERVERPIPE explicitly set, pull the
            # non-test one in as a default, otherwise set to devnull
            env['HGTESTCATAPULTSERVERPIPE'] = env.get(
                'HGCATAPULTSERVERPIPE', os.devnull
            )

        extraextensions = []
        for opt in self._extraconfigopts:
            section, key = opt.encode('utf-8').split(b'.', 1)
            if section != b'extensions':
                continue
            name = key.split(b'=', 1)[0]
            extraextensions.append(name)

        if extraextensions:
            env['HGTESTEXTRAEXTENSIONS'] = b' '.join(extraextensions).decode(
                'utf-8'
            )

        # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
        # IP addresses.
        env['LOCALIP'] = _strpath(self._localip())

        # This has the same effect as Py_LegacyWindowsStdioFlag in exewrapper.c,
        # but this is needed for testing python instances like dummyssh,
        # dummysmtpd.py, and dumbhttp.py.
        if PYTHON3 and os.name == 'nt':
            env['PYTHONLEGACYWINDOWSSTDIO'] = '1'

        # Reset some environment variables to well-known values so that
        # the tests produce repeatable output.
        env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
        env['TZ'] = 'GMT'
        env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
        env['COLUMNS'] = '80'
        env['TERM'] = 'xterm'

        dropped = [
            'CDPATH',
            'CHGDEBUG',
            'EDITOR',
            'GREP_OPTIONS',
            'HG',
            'HGMERGE',
            'HGPLAIN',
            'HGPLAINEXCEPT',
            'HGPROF',
            'http_proxy',
            'no_proxy',
            'NO_PROXY',
            'PAGER',
            'VISUAL',
        ]

        for k in dropped:
            if k in env:
                del env[k]

        # unset env related to hooks
        for k in list(env):
            if k.startswith('HG_'):
                del env[k]

        if self._usechg:
            env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')

        return env

    def _createhgrc(self, path):
        """Create an hgrc file for this test."""
        with open(path, 'wb') as hgrc:
            hgrc.write(b'[ui]\n')
            hgrc.write(b'slash = True\n')
            hgrc.write(b'interactive = False\n')
            hgrc.write(b'merge = internal:merge\n')
            hgrc.write(b'mergemarkers = detailed\n')
            hgrc.write(b'promptecho = True\n')
            hgrc.write(b'[defaults]\n')
            hgrc.write(b'[devel]\n')
            hgrc.write(b'all-warnings = true\n')
            hgrc.write(b'default-date = 0 0\n')
            hgrc.write(b'[largefiles]\n')
            hgrc.write(
                b'usercache = %s\n'
                % (os.path.join(self._testtmp, b'.cache/largefiles'))
            )
            hgrc.write(b'[lfs]\n')
            hgrc.write(
                b'usercache = %s\n'
                % (os.path.join(self._testtmp, b'.cache/lfs'))
            )
            hgrc.write(b'[web]\n')
            hgrc.write(b'address = localhost\n')
            hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
            hgrc.write(b'server-header = testing stub value\n')

            for opt in self._extraconfigopts:
                section, key = opt.encode('utf-8').split(b'.', 1)
                assert b'=' in key, (
                    'extra config opt %s must have an = for assignment' % opt
                )
                hgrc.write(b'[%s]\n%s\n' % (section, key))
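
    # For instance (hypothetical option value), passing --extra-config-opt
    # extensions.rebase= would append "[extensions]\nrebase=\n" to the
    # generated hgrc, enabling the rebase extension for this test run.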

    def fail(self, msg):
        # unittest differentiates between errored and failed.
        # Failed is denoted by AssertionError (by default at least).
        raise AssertionError(msg)

    def _runcommand(self, cmd, env, normalizenewlines=False):
        """Run command in a sub-process, capturing the output (stdout and
        stderr).

        Return a tuple (exitcode, output). output is None in debug mode.
        """
        if self._debug:
            proc = subprocess.Popen(
                _strpath(cmd), shell=True, cwd=_strpath(self._testtmp), env=env
            )
            ret = proc.wait()
            return (ret, None)

        proc = Popen4(cmd, self._testtmp, self._timeout, env)

        def cleanup():
            terminate(proc)
            ret = proc.wait()
            if ret == 0:
                ret = signal.SIGTERM << 8
            killdaemons(env['DAEMON_PIDS'])
            return ret
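
        # Note on cleanup() above: SIGTERM << 8 is the wait()-style status
        # for a normal exit with code 15, so a child that had to be
        # terminated still decodes to a nonzero result via the
        # wifexited()/WEXITSTATUS logic below.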

        proc.tochild.close()

        try:
            output = proc.fromchild.read()
        except KeyboardInterrupt:
            vlog('# Handling keyboard interrupt')
            cleanup()
            raise

        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)

        if proc.timeout:
            ret = 'timeout'

        if ret:
            killdaemons(env['DAEMON_PIDS'])

        for s, r in self._getreplacements():
            output = re.sub(s, r, output)

        if normalizenewlines:
            output = output.replace(b'\r\n', b'\n')

        return ret, output.splitlines(True)


class PythonTest(Test):
    """A Python-based test."""

    @property
    def refpath(self):
        return os.path.join(self._testdir, b'%s.out' % self.bname)

    def _run(self, env):
        py3switch = self._py3warnings and b' -3' or b''
        # Quote the python(3) executable for Windows
        cmd = b'"%s"%s "%s"' % (PYTHON, py3switch, self.path)
        vlog("# Running", cmd.decode("utf-8"))
        normalizenewlines = os.name == 'nt'
        result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines)
        if self._aborted:
            raise KeyboardInterrupt()

        return result


# Some glob patterns apply only in some circumstances, so the script
# might want to remove (glob) annotations that otherwise should be
# retained.
checkcodeglobpats = [
    # On Windows it looks like \ doesn't require a (glob), but we know
    # better.
    re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
    re.compile(br'^moving \S+/.*[^)]$'),
    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
    # Not all platforms have 127.0.0.1 as loopback (though most do),
    # so we always glob that too.
    re.compile(br'.*\$LOCALIP.*$'),
]

bchr = chr
if PYTHON3:
    bchr = lambda x: bytes([x])

WARN_UNDEFINED = 1
WARN_YES = 2
WARN_NO = 3

MARK_OPTIONAL = b" (?)\n"


def isoptional(line):
    return line.endswith(MARK_OPTIONAL)
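
# A line ending in " (?)" in a .t file is optional: a test does not fail
# when that line is missing from the actual output, and the matcher will
# retry the expectation against later output lines instead.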


class TTest(Test):
    """A "t test" is a test backed by a .t file."""

    SKIPPED_PREFIX = b'skipped: '
    FAILED_PREFIX = b'hghave check failed: '
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
    ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})

    def __init__(self, path, *args, **kwds):
        # accept an extra "case" parameter
        case = kwds.pop('case', [])
        self._case = case
        self._allcases = {x for y in parsettestcases(path) for x in y}
        super(TTest, self).__init__(path, *args, **kwds)
        if case:
            casepath = b'#'.join(case)
            self.name = '%s#%s' % (self.name, _strpath(casepath))
            self.errpath = b'%s#%s.err' % (self.errpath[:-4], casepath)
            self._tmpname += b'-%s' % casepath
        self._have = {}

    @property
    def refpath(self):
        return os.path.join(self._testdir, self.bname)

    def _run(self, env):
        with open(self.path, 'rb') as f:
            lines = f.readlines()

        # The .t file is both reference output and test input, so keep the
        # reference output updated with the test input. This avoids some race
        # conditions where the reference output does not match the actual test.
        if self._refout is not None:
            self._refout = lines

        salt, script, after, expected = self._parsetest(lines)

        # Write out the generated script.
        fname = b'%s.sh' % self._testtmp
        with open(fname, 'wb') as f:
            for l in script:
                f.write(l)

        cmd = b'%s "%s"' % (self._shell, fname)
        vlog("# Running", cmd.decode("utf-8"))

        exitcode, output = self._runcommand(cmd, env)

        if self._aborted:
            raise KeyboardInterrupt()

        # Do not merge output if skipped. Return hghave message instead.
        # Similarly, with --debug, output is None.
        if exitcode == self.SKIPPED_STATUS or output is None:
            return exitcode, output

        return self._processoutput(exitcode, output, salt, after, expected)

    def _hghave(self, reqs):
        allreqs = b' '.join(reqs)

        self._detectslow(reqs)

        if allreqs in self._have:
            return self._have.get(allreqs)

        # TODO do something smarter when all other uses of hghave are gone.
        runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
        tdir = runtestdir.replace(b'\\', b'/')
        proc = Popen4(
            b'%s -c "%s/hghave %s"' % (self._shell, tdir, allreqs),
            self._testtmp,
            0,
            self._getenv(),
        )
        stdout, stderr = proc.communicate()
        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)
        if ret == 2:
            print(stdout.decode('utf-8'))
            sys.exit(1)

        if ret != 0:
            self._have[allreqs] = (False, stdout)
            return False, stdout

        self._have[allreqs] = (True, None)
        return True, None

    def _detectslow(self, reqs):
        """update the timeout of slow tests when appropriate"""
        if b'slow' in reqs:
            self._timeout = self._slowtimeout

    def _iftest(self, args):
        # implements "#if"
        reqs = []
        for arg in args:
            if arg.startswith(b'no-') and arg[3:] in self._allcases:
                if arg[3:] in self._case:
                    return False
            elif arg in self._allcases:
                if arg not in self._case:
                    return False
            else:
                reqs.append(arg)
        self._detectslow(reqs)
        return self._hghave(reqs)[0]
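
    # Illustrative evaluation: for a test declaring cases {b'flat', b'tree'}
    # and currently running as b'tree', "#if tree serve" keeps the block
    # (b'tree' is the active case) and forwards b'serve' to hghave, while
    # "#if no-tree" skips it.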

    def _parsetest(self, lines):
        # We generate a shell script which outputs unique markers to line
        # up script results with our source. These markers include input
        # line number and the last return code.
        salt = b"SALT%d" % time.time()

        def addsalt(line, inpython):
            if inpython:
                script.append(b'%s %d 0\n' % (salt, line))
            else:
                script.append(b'echo %s %d $?\n' % (salt, line))
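
        # For example (illustrative timestamp), before the command taken
        # from input line 12 the script emits
        #   echo SALT1571234567 12 $?
        # so the runner can later split the output on the salt and recover
        # both the source position and the previous command's exit code.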

        activetrace = []
        session = str(uuid.uuid4())
        if PYTHON3:
            session = session.encode('ascii')
        hgcatapult = os.getenv('HGTESTCATAPULTSERVERPIPE') or os.getenv(
            'HGCATAPULTSERVERPIPE'
        )

        def toggletrace(cmd=None):
            if not hgcatapult or hgcatapult == os.devnull:
                return

            if activetrace:
                script.append(
                    b'echo END %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
                    % (session, activetrace[0])
                )
            if cmd is None:
                return

            if isinstance(cmd, str):
                quoted = shellquote(cmd.strip())
            else:
                quoted = shellquote(cmd.strip().decode('utf8')).encode('utf8')
            quoted = quoted.replace(b'\\', b'\\\\')
            script.append(
                b'echo START %s %s >> "$HGTESTCATAPULTSERVERPIPE"\n'
                % (session, quoted)
            )
            activetrace[0:] = [quoted]

        script = []

        # After we run the shell script, we re-unify the script output
        # with non-active parts of the source, with synchronization by our
        # SALT line number markers. The after table contains the non-active
        # components, ordered by line number.
        after = {}

        # Expected shell script output.
        expected = {}

        pos = prepos = -1

        # True or False when in a true or false conditional section
        skipping = None

        # We keep track of whether or not we're in a Python block so we
        # can generate the surrounding doctest magic.
        inpython = False

        if self._debug:
            script.append(b'set -x\n')
        if self._hgcommand != b'hg':
            script.append(b'alias hg="%s"\n' % self._hgcommand)
        if os.getenv('MSYSTEM'):
            script.append(b'alias pwd="pwd -W"\n')

        if hgcatapult and hgcatapult != os.devnull:
            if PYTHON3:
                hgcatapult = hgcatapult.encode('utf8')
                cataname = self.name.encode('utf8')
            else:
                cataname = self.name

            # Kludge: use a while loop to keep the pipe from getting
            # closed by our echo commands. The still-running file gets
            # reaped at the end of the script, which causes the while
            # loop to exit and closes the pipe. Sigh.
            script.append(
                b'rtendtracing() {\n'
                b'  echo END %(session)s %(name)s >> %(catapult)s\n'
                b'  rm -f "$TESTTMP/.still-running"\n'
                b'}\n'
                b'trap "rtendtracing" 0\n'
                b'touch "$TESTTMP/.still-running"\n'
                b'while [ -f "$TESTTMP/.still-running" ]; do sleep 1; done '
                b'> %(catapult)s &\n'
                b'HGCATAPULTSESSION=%(session)s ; export HGCATAPULTSESSION\n'
                b'echo START %(session)s %(name)s >> %(catapult)s\n'
                % {
                    b'name': cataname,
                    b'session': session,
                    b'catapult': hgcatapult,
                }
            )

        if self._case:
            casestr = b'#'.join(self._case)
            if isinstance(self._case, str):
                quoted = shellquote(casestr)
            else:
                quoted = shellquote(casestr.decode('utf8')).encode('utf8')
            script.append(b'TESTCASE=%s\n' % quoted)
            script.append(b'export TESTCASE\n')

        n = 0
        for n, l in enumerate(lines):
            if not l.endswith(b'\n'):
                l += b'\n'
            if l.startswith(b'#require'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#require':
                    after.setdefault(pos, []).append(b'  !!! invalid #require\n')
                if not skipping:
                    haveresult, message = self._hghave(lsplit[1:])
                    if not haveresult:
                        script = [b'echo "%s"\nexit 80\n' % message]
                        break
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#if'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#if':
                    after.setdefault(pos, []).append(b'  !!! invalid #if\n')
                if skipping is not None:
                    after.setdefault(pos, []).append(b'  !!! nested #if\n')
                skipping = not self._iftest(lsplit[1:])
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#else'):
                if skipping is None:
                    after.setdefault(pos, []).append(b'  !!! missing #if\n')
                skipping = not skipping
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#endif'):
                if skipping is None:
                    after.setdefault(pos, []).append(b'  !!! missing #if\n')
                skipping = None
                after.setdefault(pos, []).append(l)
            elif skipping:
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'  >>> '):  # python inlines
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                if not inpython:
                    # We've just entered a Python block. Add the header.
                    inpython = True
                    addsalt(prepos, False)  # Make sure we report the exit code.
                    script.append(b'"%s" -m heredoctest <<EOF\n' % PYTHON)
                addsalt(n, True)
                script.append(l[2:])
            elif l.startswith(b'  ... '):  # python inlines
                after.setdefault(prepos, []).append(l)
                script.append(l[2:])
            elif l.startswith(b'  $ '):  # commands
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                addsalt(n, False)
                rawcmd = l[4:]
                cmd = rawcmd.split()
                toggletrace(rawcmd)
                if len(cmd) == 2 and cmd[0] == b'cd':
                    # Rewrite the script line so that a failing cd aborts
                    # the rest of the test.
                    rawcmd = b'cd %s || exit 1\n' % cmd[1]
                script.append(rawcmd)
            elif l.startswith(b'  > '):  # continuations
                after.setdefault(prepos, []).append(l)
                script.append(l[4:])
            elif l.startswith(b'  '):  # results
                # Queue up a list of expected results.
                expected.setdefault(pos, []).append(l[2:])
            else:
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                # Non-command/result. Queue up for merged output.
                after.setdefault(pos, []).append(l)

        if inpython:
            script.append(b'EOF\n')
        if skipping is not None:
            after.setdefault(pos, []).append(b'  !!! missing #endif\n')
        addsalt(n + 1, False)
        # Need to end any current per-command trace
        if activetrace:
            toggletrace()
        return salt, script, after, expected
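
    # Rough shape of the result (illustrative): for a .t snippet
    #   $ echo hi
    #   hi
    # script holds the generated shell lines plus salt markers, expected
    # maps the command's position to [b'hi\n'], and after maps positions to
    # the verbatim source lines that are spliced back into the output.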

    def _processoutput(self, exitcode, output, salt, after, expected):
        # Merge the script output back into a unified test.
        warnonly = WARN_UNDEFINED  # 1: not yet; 2: yes; 3: for sure not
        if exitcode != 0:
            warnonly = WARN_NO

        pos = -1
        postout = []
        for out_rawline in output:
            out_line, cmd_line = out_rawline, None
            if salt in out_rawline:
                out_line, cmd_line = out_rawline.split(salt, 1)

            pos, postout, warnonly = self._process_out_line(
                out_line, pos, postout, expected, warnonly
            )
            pos, postout = self._process_cmd_line(cmd_line, pos, postout, after)

        if pos in after:
            postout += after.pop(pos)

        if warnonly == WARN_YES:
            exitcode = False  # Set exitcode to warned.

        return exitcode, postout

    def _process_out_line(self, out_line, pos, postout, expected, warnonly):
        while out_line:
            if not out_line.endswith(b'\n'):
                out_line += b' (no-eol)\n'

            # Find the expected output at the current position.
            els = [None]
            if expected.get(pos, None):
                els = expected[pos]

            optional = []
            for i, el in enumerate(els):
                r = False
                if el:
                    r, exact = self.linematch(el, out_line)
                    if isinstance(r, str):
                        if r == '-glob':
                            out_line = b''.join(el.rsplit(b' (glob)', 1))
                            r = ''  # Warn only this line.
                        elif r == "retry":
                            postout.append(b'  ' + el)
                        else:
                            log('\ninfo, unknown linematch result: %r\n' % r)
                            r = False
                    if r:
                        els.pop(i)
                        break
                    if el:
                        if isoptional(el):
                            optional.append(i)
                        else:
                            m = optline.match(el)
                            if m:
                                conditions = [c for c in m.group(2).split(b' ')]

                                if not self._iftest(conditions):
                                    optional.append(i)
                                    if exact:
                                        # Don't allow the line to be matched
                                        # against a later line in the output
                                        els.pop(i)
                                        break

            if r:
                if r == "retry":
                    continue
                # clean up any optional leftovers
                for i in optional:
                    postout.append(b'  ' + els[i])
                for i in reversed(optional):
                    del els[i]
                postout.append(b'  ' + el)
            else:
                if self.NEEDESCAPE(out_line):
                    out_line = TTest._stringescape(
                        b'%s (esc)\n' % out_line.rstrip(b'\n')
                    )
                postout.append(b'  ' + out_line)  # Let diff deal with it.
                if r != '':  # If line failed.
                    warnonly = WARN_NO
                elif warnonly == WARN_UNDEFINED:
                    warnonly = WARN_YES
            break
        else:
            # clean up any optional leftovers
            while expected.get(pos, None):
                el = expected[pos].pop(0)
                if el:
                    if not isoptional(el):
                        m = optline.match(el)
                        if m:
                            conditions = [c for c in m.group(2).split(b' ')]

                            if self._iftest(conditions):
                                # Don't append as optional line
                                continue
                        else:
                            continue
                postout.append(b'  ' + el)
        return pos, postout, warnonly

    def _process_cmd_line(self, cmd_line, pos, postout, after):
        """process a "command" part of a line from unified test output"""
        if cmd_line:
            # Add on last return code.
            ret = int(cmd_line.split()[1])
            if ret != 0:
                postout.append(b'  [%d]\n' % ret)
            if pos in after:
                # Merge in non-active test bits.
                postout += after.pop(pos)
            pos = int(cmd_line.split()[0])
        return pos, postout
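
    # cmd_line is whatever followed the salt marker, e.g. b' 12 1\n'
    # (illustrative): field 0 is the source line of the next command and
    # field 1 is the previous command's exit code, echoed back into the
    # merged output as "  [1]" when nonzero.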

    @staticmethod
    def rematch(el, l):
        try:
            el = b'(?:' + el + b')'
            # use \Z to ensure that the regex matches to the end of the string
            if os.name == 'nt':
                return re.match(el + br'\r?\n\Z', l)
            return re.match(el + br'\n\Z', l)
        except re.error:
            # el is an invalid regex
            return False

    @staticmethod
    def globmatch(el, l):
        # The only supported special characters are * and ? plus / which also
        # matches \ on windows. Escaping of these characters is supported.
        if el + b'\n' == l:
            if os.altsep:
                # matching on "/" is not needed for this line
                for pat in checkcodeglobpats:
                    if pat.match(el):
                        return True
                return b'-glob'
            return True
        el = el.replace(b'$LOCALIP', b'*')
        i, n = 0, len(el)
        res = b''
        while i < n:
            c = el[i : i + 1]
            i += 1
            if c == b'\\' and i < n and el[i : i + 1] in b'*?\\/':
                res += el[i - 1 : i + 1]
                i += 1
            elif c == b'*':
                res += b'.*'
            elif c == b'?':
                res += b'.'
            elif c == b'/' and os.altsep:
                res += b'[/\\\\]'
            else:
                res += re.escape(c)
        return TTest.rematch(res, l)
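
    # A sketch of the translation performed above (comment only, example
    # line hypothetical): the expected line
    #   b'saved backup bundle to $TESTTMP/foo-*.hg'
    # becomes a regex where '*' turns into '.*', '?' into '.', and '/' into
    # '[/\\]' when os.altsep is set, with every other byte re.escape()d;
    # the result is then handed to rematch() for an anchored match.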

    def linematch(self, el, l):
        if el == l:  # perfect match (fast)
            return True, True
        retry = False
        if isoptional(el):
            retry = "retry"
            el = el[: -len(MARK_OPTIONAL)] + b"\n"
        else:
            m = optline.match(el)
            if m:
                conditions = [c for c in m.group(2).split(b' ')]

                el = m.group(1) + b"\n"
                if not self._iftest(conditions):
                    # listed feature missing, should not match
                    return "retry", False

        if el.endswith(b" (esc)\n"):
            if PYTHON3:
                el = el[:-7].decode('unicode_escape') + '\n'
                el = el.encode('utf-8')
            else:
                el = el[:-7].decode('string-escape') + '\n'
        if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
            return True, True
        if el.endswith(b" (re)\n"):
            return (TTest.rematch(el[:-6], l) or retry), False
        if el.endswith(b" (glob)\n"):
            # ignore '(glob)' added to l by 'replacements'
            if l.endswith(b" (glob)\n"):
                l = l[:-8] + b"\n"
            return (TTest.globmatch(el[:-8], l) or retry), False
        if os.altsep:
            _l = l.replace(b'\\', b'/')
            if el == _l or os.name == 'nt' and el[:-1] + b'\r\n' == _l:
                return True, True
        return retry, True
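
    # Contract of linematch() (reading aid): it returns a (match, exact)
    # pair. `match` may be truthy (a match object or True), False, or a
    # string marker such as "retry" (optional line, reconsider later) or
    # b'-glob' (glob match that only warrants a warning); `exact` tells the
    # caller whether the comparison was a literal one rather than a
    # (re)/(glob) pattern match.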

    @staticmethod
    def parsehghaveoutput(lines):
        '''Parse hghave log lines.

        Return tuple of lists (missing, failed):
          * the missing/unknown features
          * the features for which existence check failed'''
        missing = []
        failed = []
        for line in lines:
            if line.startswith(TTest.SKIPPED_PREFIX):
                line = line.splitlines()[0]
                missing.append(
                    line[len(TTest.SKIPPED_PREFIX) :].decode('utf-8')
                )
            elif line.startswith(TTest.FAILED_PREFIX):
                line = line.splitlines()[0]
                failed.append(line[len(TTest.FAILED_PREFIX) :].decode('utf-8'))

        return missing, failed
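
    # Example (prefix spellings are those defined on TTest earlier in this
    # file; feature names hypothetical): given log lines along the lines of
    #   skipped: missing feature: serve
    #   hghave check failed: docutils
    # this would return (['missing feature: serve'], ['docutils']).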

    @staticmethod
    def _escapef(m):
        return TTest.ESCAPEMAP[m.group(0)]

    @staticmethod
    def _stringescape(s):
        return TTest.ESCAPESUB(TTest._escapef, s)


iolock = threading.RLock()
firstlock = threading.RLock()
firsterror = False


class TestResult(unittest._TextTestResult):
    """Holds results when executing via unittest."""

    # Don't worry too much about accessing the non-public _TextTestResult.
    # It is relatively common in Python testing tools.
    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

        if options.color == 'auto':
            self.color = pygmentspresent and self.stream.isatty()
        elif options.color == 'never':
            self.color = False
        else:  # 'always', for testing purposes
            self.color = pygmentspresent

    def onStart(self, test):
        """Can be overridden by a custom TestResult
        """

    def onEnd(self):
        """Can be overridden by a custom TestResult
        """

    def addFailure(self, test, reason):
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
        else:
            with iolock:
                if reason == "timed out":
                    self.stream.write('t')
                else:
                    if not self._options.nodiff:
                        self.stream.write('\n')
                        # Exclude the '\n' from highlighting to lex correctly
                        formatted = 'ERROR: %s output changed\n' % test
                        self.stream.write(highlightmsg(formatted, self.color))
                    self.stream.write('!')

                self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()

    def addIgnore(self, test, reason):
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    self.testsRun += 1
                self.stream.flush()

    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test."""
        if self.shouldStop or firsterror:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                v = self._options.view
                subprocess.call(
                    r'"%s" "%s" "%s"'
                    % (v, _strpath(test.refpath), _strpath(test.errpath)),
                    shell=True,
                )
            else:
                servefail, lines = getdiff(
                    expected, got, test.refpath, test.errpath
                )
                self.stream.write('\n')
                for line in lines:
                    line = highlightdiff(line, self.color)
                    if PYTHON3:
                        self.stream.flush()
                        self.stream.buffer.write(line)
                        self.stream.buffer.flush()
                    else:
                        self.stream.write(line)
                        self.stream.flush()

                if servefail:
                    raise test.failureException(
                        'server failed to start (HGPORT=%s)' % test._startport
                    )

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                if test.readrefout() != expected:
                    self.stream.write(
                        'Reference output has changed (run again to prompt '
                        'changes)'
                    )
                else:
                    self.stream.write('Accept this change? [n] ')
                    self.stream.flush()
                    answer = sys.stdin.readline().strip()
                    if answer.lower() in ('y', 'yes'):
                        if test.path.endswith(b'.t'):
                            rename(test.errpath, test.path)
                        else:
                            rename(test.errpath, '%s.out' % test.path)
                        accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(lines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times() returns the user and system time spent by the process
        # and by its child processes, along with the elapsed real time. One
        # limitation: the child-process times are only reported on POSIX
        # systems, not on Windows.
        test.started = os.times()
        if self._firststarttime is None:  # thread racy but irrelevant
            self._firststarttime = test.started[4]

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped = os.times()

        starttime = test.started
        endtime = test.stopped
        origin = self._firststarttime
        self.times.append(
            (
                test.name,
                endtime[2] - starttime[2],  # user space CPU time
                endtime[3] - starttime[3],  # sys space CPU time
                endtime[4] - starttime[4],  # real time
                starttime[4] - origin,  # start date in run context
                endtime[4] - origin,  # end date in run context
            )
        )

        if interrupted:
            with iolock:
                self.stream.writeln(
                    'INTERRUPTED: %s (after %d seconds)'
                    % (test.name, self.times[-1][3])
                )


def getTestResult():
    """
    Returns the relevant test result
    """
    if "CUSTOM_TEST_RESULT" in os.environ:
        testresultmodule = __import__(os.environ["CUSTOM_TEST_RESULT"])
        return testresultmodule.TestResult
    else:
        return TestResult
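
# Usage sketch (module name hypothetical):
#   CUSTOM_TEST_RESULT=mytestresult python run-tests.py ...
# imports the module `mytestresult` and uses its TestResult class in place
# of the default one defined above.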


class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(
        self,
        testdir,
        jobs=1,
        whitelist=None,
        blacklist=None,
        retest=False,
        keywords=None,
        loop=False,
        runs_per_test=1,
        loadtest=None,
        showchannels=False,
        *args,
        **kwargs
    ):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes keywords that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:

            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test, num_tests[0])
                return test

            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            if not (self._whitelist and test.bname in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    with open(test.path, 'rb') as f:
                        t = f.read().lower() + test.bname.lower()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        channels = [""] * self._jobs

        def job(test, result):
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            else:
                raise ValueError('Could not find output channel')
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except:  # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            finally:
                try:
                    channels[channel] = ''
                except IndexError:
                    pass

        def stat():
            count = 0
            while channels:
                d = '\n%03s ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + ' ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(0.1)
                count += 1
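
        # Reading aid for stat() above (comment only): when channel display
        # is enabled via self._showchannels, each concurrently running job
        # owns one column. On each tick the column prints the next character
        # of '=<testname>' while the job runs ('.' once the name is
        # exhausted) and a space when the channel is idle, giving a
        # scrolling picture of which job is running what.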

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and running != self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(self._loadtest(test, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(
                            target=job, name=test.name, args=(test, result)
                        )
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrectly.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        channels = []

        return result


# Save the most recent 5 wall-clock runtimes of each test to a
# human-readable text file named .testtimes. Tests are sorted
# alphabetically, while times for each test are listed from oldest to
# newest.


def loadtimes(outputdir):
    times = []
    try:
        with open(os.path.join(outputdir, b'.testtimes')) as fp:
            for line in fp:
                m = re.match('(.*?) ([0-9. ]+)', line)
                times.append(
                    (m.group(1), [float(t) for t in m.group(2).split()])
                )
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times
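
# The .testtimes file read above is written by savetimes() below, one test
# per line, e.g. (values hypothetical):
#   test-annotate.t 2.101 1.984 2.050
# i.e. a test name followed by up to five recent wall-clock runtimes,
# oldest first.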


def savetimes(outputdir, result):
    saved = dict(loadtimes(outputdir))
    maxruns = 5
    skipped = set([str(t[0]) for t in result.skipped])
    for tdata in result.times:
        test, real = tdata[0], tdata[3]
        if test not in skipped:
            ts = saved.setdefault(test, [])
            ts.append(real)
            ts[:] = ts[-maxruns:]

    fd, tmpname = tempfile.mkstemp(
        prefix=b'.testtimes', dir=outputdir, text=True
    )
    with os.fdopen(fd, 'w') as fp:
        for name, ts in sorted(saved.items()):
            fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
    timepath = os.path.join(outputdir, b'.testtimes')
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass


class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings."""

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        self._runner = runner

        self._result = getTestResult()(
            self._runner.options, self.stream, self.descriptions, self.verbosity
        )

    def listtests(self, test):
        test = sorted(test, key=lambda t: t.name)

        self._result.onStart(test)

        for t in test:
            print(t.name)
            self._result.addSuccess(t)

        if self._runner.options.xunit:
            with open(self._runner.options.xunit, "wb") as xuf:
                self._writexunit(self._result, xuf)

        if self._runner.options.json:
            jsonpath = os.path.join(self._runner._outputdir, b'report.json')
            with open(jsonpath, 'w') as fp:
                self._writejson(self._result, fp)

        return self._result

    def run(self, test):
        self._result.onStart(test)
        test(self._result)

        failed = len(self._result.failures)
        skipped = len(self._result.skipped)
        ignored = len(self._result.ignored)

        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in sorted(
                    self._result.skipped, key=lambda s: s[0].name
                ):
                    formatted = 'Skipped %s: %s\n' % (test.name, msg)
                    msg = highlightmsg(formatted, self._result.color)
                    self.stream.write(msg)
            for test, msg in sorted(
                self._result.failures, key=lambda f: f[0].name
            ):
                formatted = 'Failed %s: %s\n' % (test.name, msg)
                self.stream.write(highlightmsg(formatted, self._result.color))
            for test, msg in sorted(
                self._result.errors, key=lambda e: e[0].name
            ):
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                with open(self._runner.options.xunit, "wb") as xuf:
                    self._writexunit(self._result, xuf)

            if self._runner.options.json:
                jsonpath = os.path.join(self._runner._outputdir, b'report.json')
                with open(jsonpath, 'w') as fp:
                    self._writejson(self._result, fp)

            self._runner._checkhglib('Tested')

            savetimes(self._runner._outputdir, self._result)

            if failed and self._runner.options.known_good_rev:
                self._bisecttests(t for t, m in self._result.failures)
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d failed.'
                % (self._result.testsRun, skipped + ignored, failed)
            )
            if failed:
                self.stream.writeln(
                    'python hash seed: %s' % os.environ['PYTHONHASHSEED']
                )
            if self._runner.options.time:
                self.printtimes(self._result.times)

            if self._runner.options.exceptions:
                exceptions = aggregateexceptions(
                    os.path.join(self._runner._outputdir, b'exceptions')
                )

                self.stream.writeln('Exceptions Report:')
                self.stream.writeln(
                    '%d total from %d frames'
                    % (exceptions['total'], len(exceptions['exceptioncounts']))
                )
                combined = exceptions['combined']
                for key in sorted(combined, key=combined.get, reverse=True):
                    frame, line, exc = key
                    totalcount, testcount, leastcount, leasttest = combined[key]

                    self.stream.writeln(
                        '%d (%d tests)\t%s: %s (%s - %d total)'
                        % (
                            totalcount,
                            testcount,
                            frame,
                            exc,
                            leasttest,
                            leastcount,
                        )
                    )

            self.stream.flush()

        return self._result

    def _bisecttests(self, tests):
        bisectcmd = ['hg', 'bisect']
        bisectrepo = self._runner.options.bisect_repo
        if bisectrepo:
            bisectcmd.extend(['-R', os.path.abspath(bisectrepo)])

        def pread(args):
            env = os.environ.copy()
            env['HGPLAIN'] = '1'
            p = subprocess.Popen(
                args, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=env
            )
            data = p.stdout.read()
            p.wait()
            return data

        for test in tests:
            pread(bisectcmd + ['--reset'])
            pread(bisectcmd + ['--bad', '.'])
            pread(bisectcmd + ['--good', self._runner.options.known_good_rev])
            # TODO: we probably need to forward more options
            # that alter hg's behavior inside the tests.
            opts = ''
            withhg = self._runner.options.with_hg
            if withhg:
                opts += ' --with-hg=%s ' % shellquote(_strpath(withhg))
            rtc = '%s %s %s %s' % (sysexecutable, sys.argv[0], opts, test)
            data = pread(bisectcmd + ['--command', rtc])
            m = re.search(
                (
                    br'\nThe first (?P<goodbad>bad|good) revision '
                    br'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
                    br'summary: +(?P<summary>[^\n]+)\n'
                ),
                data,
                (re.MULTILINE | re.DOTALL),
            )
            if m is None:
                self.stream.writeln(
                    'Failed to identify failure point for %s' % test
                )
                continue
            dat = m.groupdict()
            verb = 'broken' if dat['goodbad'] == b'bad' else 'fixed'
            self.stream.writeln(
                '%s %s by %s (%s)'
                % (
                    test,
                    verb,
                    dat['node'].decode('ascii'),
                    dat['summary'].decode('utf8', 'ignore'),
                )
            )
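
    # Sketch of what _bisecttests effectively runs per failing test (paths
    # and test name hypothetical):
    #   hg bisect --reset
    #   hg bisect --bad .
    #   hg bisect --good <known_good_rev>
    #   hg bisect --command 'python run-tests.py [--with-hg=...] test-foo.t'
    # and then parses hg's "The first bad/good revision is:" message from
    # the command output.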

    def printtimes(self, times):
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
        self.stream.writeln(
            '%-7s %-7s %-7s %-7s %-7s %s'
            % ('start', 'end', 'cuser', 'csys', 'real', 'Test')
        )
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))

    @staticmethod
    def _writexunit(result, outf):
        # See http://llg.cubic.org/docs/junit/ for a reference.
        timesd = dict((t[0], t[3]) for t in result.times)
        doc = minidom.Document()
        s = doc.createElement('testsuite')
        s.setAttribute('errors', "0")  # TODO
        s.setAttribute('failures', str(len(result.failures)))
        s.setAttribute('name', 'run-tests')
        s.setAttribute(
            'skipped', str(len(result.skipped) + len(result.ignored))
        )
        s.setAttribute('tests', str(result.testsRun))
        doc.appendChild(s)
        for tc in result.successes:
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            tctime = timesd.get(tc.name)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            s.appendChild(t)
        for tc, err in sorted(result.faildata.items()):
            t = doc.createElement('testcase')
            t.setAttribute('name', tc)
            tctime = timesd.get(tc)
            if tctime is not None:
                t.setAttribute('time', '%.3f' % tctime)
            # createCDATASection expects a unicode or it will
            # convert using default conversion rules, which will
            # fail if string isn't ASCII.
            err = cdatasafe(err).decode('utf-8', 'replace')
            cd = doc.createCDATASection(err)
            # Use 'failure' here instead of 'error' to match errors = 0,
            # failures = len(result.failures) in the testsuite element.
            failelem = doc.createElement('failure')
            failelem.setAttribute('message', 'output changed')
            failelem.setAttribute('type', 'output-mismatch')
            failelem.appendChild(cd)
            t.appendChild(failelem)
            s.appendChild(t)
        for tc, message in result.skipped:
            # According to the schema, 'skipped' has no attributes. So store
            # the skip message as a text node instead.
            t = doc.createElement('testcase')
            t.setAttribute('name', tc.name)
            binmessage = message.encode('utf-8')
            message = cdatasafe(binmessage).decode('utf-8', 'replace')
            cd = doc.createCDATASection(message)
            skipelem = doc.createElement('skipped')
            skipelem.appendChild(cd)
            t.appendChild(skipelem)
            s.appendChild(t)
        outf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))
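
    # Shape of the document written above (attribute values hypothetical):
    #   <testsuite name="run-tests" tests="40" failures="1" skipped="2"
    #              errors="0">
    #     <testcase name="test-good.t" time="1.234"/>
    #     <testcase name="test-bad.t">
    #       <failure message="output changed" type="output-mismatch">
    #         <![CDATA[--- expected +++ got ...]]>
    #       </failure>
    #     </testcase>
    #     <testcase name="test-skip.t"><skipped><![CDATA[...]]></skipped></testcase>
    #   </testsuite>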

    @staticmethod
    def _writejson(result, outf):
        timesd = {}
        for tdata in result.times:
            test = tdata[0]
            timesd[test] = tdata[1:]

        outcome = {}
        groups = [
            ('success', ((tc, None) for tc in result.successes)),
            ('failure', result.failures),
            ('skip', result.skipped),
        ]
        for res, testcases in groups:
            for tc, __ in testcases:
                if tc.name in timesd:
                    diff = result.faildata.get(tc.name, b'')
                    try:
                        diff = diff.decode('unicode_escape')
                    except UnicodeDecodeError as e:
                        diff = '%r decoding diff, sorry' % e
                    tres = {
                        'result': res,
                        'time': ('%0.3f' % timesd[tc.name][2]),
                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                        'csys': ('%0.3f' % timesd[tc.name][1]),
                        'start': ('%0.3f' % timesd[tc.name][3]),
                        'end': ('%0.3f' % timesd[tc.name][4]),
                        'diff': diff,
                    }
                else:
                    # blacklisted test
                    tres = {'result': res}

                outcome[tc.name] = tres
        jsonout = json.dumps(
            outcome, sort_keys=True, indent=4, separators=(',', ': ')
        )
        outf.writelines(("testreport =", jsonout))
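
    # Shape of the report written above (values hypothetical):
    #   testreport ={
    #       "test-good.t": {
    #           "result": "success",
    #           "cuser": "0.120", "csys": "0.030", "time": "1.234",
    #           "start": "0.000", "end": "1.234", "diff": ""
    #       }
    #   }
    # The 'testreport =' prefix comes from the writelines() call above.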


def sorttests(testdescs, previoustimes, shuffle=False):
    """Do an in-place sort of tests."""
    if shuffle:
        random.shuffle(testdescs)
        return

    if previoustimes:

        def sortkey(f):
            f = f['path']
            if f in previoustimes:
                # Use most recent time as estimate
                return -(previoustimes[f][-1])
            else:
                # Default to a rather arbitrary value of 1 second for new tests
                return -1.0

    else:
        # keywords for slow tests
        slow = {
            b'svn': 10,
            b'cvs': 10,
            b'hghave': 10,
            b'largefiles-update': 10,
            b'run-tests': 10,
            b'corruption': 10,
            b'race': 10,
            b'i18n': 10,
            b'check': 100,
            b'gendoc': 100,
            b'contrib-perf': 200,
            b'merge-combination': 100,
        }
        perf = {}

        def sortkey(f):
            # run largest tests first, as they tend to take the longest
            f = f['path']
            try:
                return perf[f]
            except KeyError:
                try:
                    val = -os.stat(f).st_size
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
                    perf[f] = -1e9  # file does not exist, tell early
                    return -1e9
                for kw, mul in slow.items():
                    if kw in f:
                        val *= mul
                if f.endswith(b'.py'):
                    val /= 10.0
                perf[f] = val / 1000.0
                return perf[f]

    testdescs.sort(key=sortkey)
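
# Worked example for the size-based sortkey above (numbers hypothetical):
# a 5000-byte test whose path contains b'svn' gets val = -5000 * 10, then
# val / 1000.0 = -50.0; a plain 5000-byte .t test scores -5.0, so the svn
# test sorts first. A missing file scores -1e9 and therefore fails early.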
2876
2876
2877
2877
2878 class TestRunner(object):
2878 class TestRunner(object):
2879 """Holds context for executing tests.
2879 """Holds context for executing tests.
2880
2880
2881 Tests rely on a lot of state. This object holds it for them.
2881 Tests rely on a lot of state. This object holds it for them.
2882 """
2882 """
2883
2883
2884 # Programs required to run tests.
2884 # Programs required to run tests.
2885 REQUIREDTOOLS = [
2885 REQUIREDTOOLS = [
2886 b'diff',
2886 b'diff',
2887 b'grep',
2887 b'grep',
2888 b'unzip',
2888 b'unzip',
2889 b'gunzip',
2889 b'gunzip',
2890 b'bunzip2',
2890 b'bunzip2',
2891 b'sed',
2891 b'sed',
2892 ]
2892 ]
2893
2893
2894 # Maps file extensions to test class.
2894 # Maps file extensions to test class.
2895 TESTTYPES = [
2895 TESTTYPES = [
2896 (b'.py', PythonTest),
2896 (b'.py', PythonTest),
2897 (b'.t', TTest),
2897 (b'.t', TTest),
2898 ]
2898 ]
2899
2899
2900 def __init__(self):
2900 def __init__(self):
2901 self.options = None
2901 self.options = None
2902 self._hgroot = None
2902 self._hgroot = None
2903 self._testdir = None
2903 self._testdir = None
2904 self._outputdir = None
2904 self._outputdir = None
2905 self._hgtmp = None
2905 self._hgtmp = None
2906 self._installdir = None
2906 self._installdir = None
2907 self._bindir = None
2907 self._bindir = None
2908 self._tmpbinddir = None
2908 self._tmpbinddir = None
2909 self._pythondir = None
2909 self._pythondir = None
2910 self._coveragefile = None
2910 self._coveragefile = None
2911 self._createdfiles = []
2911 self._createdfiles = []
2912 self._hgcommand = None
2912 self._hgcommand = None
2913 self._hgpath = None
2913 self._hgpath = None
2914 self._portoffset = 0
2914 self._portoffset = 0
2915 self._ports = {}
2915 self._ports = {}
2916
2916
2917 def run(self, args, parser=None):
2917 def run(self, args, parser=None):
2918 """Run the test suite."""
2918 """Run the test suite."""
2919 oldmask = os.umask(0o22)
2919 oldmask = os.umask(0o22)
2920 try:
2920 try:
2921 parser = parser or getparser()
2921 parser = parser or getparser()
2922 options = parseargs(args, parser)
2922 options = parseargs(args, parser)
2923 tests = [_bytespath(a) for a in options.tests]
2923 tests = [_bytespath(a) for a in options.tests]
2924 if options.test_list is not None:
2924 if options.test_list is not None:
2925 for listfile in options.test_list:
2925 for listfile in options.test_list:
2926 with open(listfile, 'rb') as f:
2926 with open(listfile, 'rb') as f:
2927 tests.extend(t for t in f.read().splitlines() if t)
2927 tests.extend(t for t in f.read().splitlines() if t)
2928 self.options = options
2928 self.options = options
2929
2929
2930 self._checktools()
2930 self._checktools()
2931 testdescs = self.findtests(tests)
2931 testdescs = self.findtests(tests)
2932 if options.profile_runner:
2932 if options.profile_runner:
2933 import statprof
2933 import statprof
2934
2934
2935 statprof.start()
2935 statprof.start()
2936 result = self._run(testdescs)
2936 result = self._run(testdescs)
2937 if options.profile_runner:
2937 if options.profile_runner:
2938 statprof.stop()
2938 statprof.stop()
2939 statprof.display()
2939 statprof.display()
2940 return result
2940 return result
2941
2941
2942 finally:
2942 finally:
2943 os.umask(oldmask)
2943 os.umask(oldmask)
2944
2944
    def _run(self, testdescs):
        testdir = getcwdb()
        self._testdir = osenvironb[b'TESTDIR'] = getcwdb()
        # assume all tests in same folder for now
        if testdescs:
            pathname = os.path.dirname(testdescs[0]['path'])
            if pathname:
                testdir = os.path.join(testdir, pathname)
        self._testdir = osenvironb[b'TESTDIR'] = testdir
        if self.options.outputdir:
            self._outputdir = canonpath(_bytespath(self.options.outputdir))
        else:
            self._outputdir = getcwdb()
            if testdescs and pathname:
                self._outputdir = os.path.join(self._outputdir, pathname)
        previoustimes = {}
        if self.options.order_by_runtime:
            previoustimes = dict(loadtimes(self._outputdir))
        sorttests(testdescs, previoustimes, shuffle=self.options.random)

        if 'PYTHONHASHSEED' not in os.environ:
            # use a random python hash seed all the time
            # we do the randomness ourselves so we know what seed is used
            os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
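            # Since the seed is exported through the environment, a run that
            # depends on hash ordering can in principle be reproduced by
            # pinning the same value up front, e.g. (illustrative value):
            #
            #     PYTHONHASHSEED=2470198656 ./run-tests.py test-example.t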

        if self.options.tmpdir:
            self.options.keep_tmpdir = True
            tmpdir = _bytespath(self.options.tmpdir)
            if os.path.exists(tmpdir):
                # Meaning of tmpdir has changed since 1.3: we used to create
                # HGTMP inside tmpdir; now HGTMP is tmpdir.  So fail if
                # tmpdir already exists.
                print("error: temp dir %r already exists" % tmpdir)
                return 1

            os.makedirs(tmpdir)
        else:
            d = None
            if os.name == 'nt':
                # without this, we get the default temp dir location, but
                # in all lowercase, which causes trouble with paths (issue3490)
                d = osenvironb.get(b'TMP', None)
            tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)

        self._hgtmp = osenvironb[b'HGTMP'] = os.path.realpath(tmpdir)

        if self.options.with_hg:
            self._installdir = None
            whg = self.options.with_hg
            self._bindir = os.path.dirname(os.path.realpath(whg))
            assert isinstance(self._bindir, bytes)
            self._hgcommand = os.path.basename(whg)
            self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
            os.makedirs(self._tmpbindir)

            normbin = os.path.normpath(os.path.abspath(whg))
            normbin = normbin.replace(os.sep.encode('ascii'), b'/')

            # Other Python scripts in the test harness need to
            # `import mercurial`. If `hg` is a Python script, we assume
            # the Mercurial modules are relative to its path and tell the tests
            # to load Python modules from its directory.
            with open(whg, 'rb') as fh:
                initial = fh.read(1024)

            if re.match(b'#!.*python', initial):
                self._pythondir = self._bindir
            # If it looks like our in-repo Rust binary, use the source root.
            # This is a bit hacky. But rhg is still not supported outside the
            # source directory. So until it is, do the simple thing.
            elif re.search(b'/rust/target/[^/]+/hg', normbin):
                self._pythondir = os.path.dirname(self._testdir)
            # Fall back to the legacy behavior.
            else:
                self._pythondir = self._bindir

        else:
            self._installdir = os.path.join(self._hgtmp, b"install")
            self._bindir = os.path.join(self._installdir, b"bin")
            self._hgcommand = b'hg'
            self._tmpbindir = self._bindir
            self._pythondir = os.path.join(self._installdir, b"lib", b"python")

        # Force the use of hg.exe instead of relying on MSYS to recognize hg is
        # a python script and feed it to python.exe. Legacy stdio is force
        # enabled by hg.exe, and this is a more realistic way to launch hg
        # anyway.
        if os.name == 'nt' and not self._hgcommand.endswith(b'.exe'):
            self._hgcommand += b'.exe'

        # set CHGHG, then replace the "hg" command by "chg"
        chgbindir = self._bindir
        if self.options.chg or self.options.with_chg:
            osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
        else:
            osenvironb.pop(b'CHGHG', None)  # drop flag for hghave
        if self.options.chg:
            self._hgcommand = b'chg'
        elif self.options.with_chg:
            chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
            self._hgcommand = os.path.basename(self.options.with_chg)

        osenvironb[b"BINDIR"] = self._bindir
        osenvironb[b"PYTHON"] = PYTHON

        fileb = _bytespath(__file__)
        runtestdir = os.path.abspath(os.path.dirname(fileb))
        osenvironb[b'RUNTESTDIR'] = runtestdir
        if PYTHON3:
            sepb = _bytespath(os.pathsep)
        else:
            sepb = os.pathsep
        path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
        if os.path.islink(__file__):
            # test helper will likely be at the end of the symlink
            realfile = os.path.realpath(fileb)
            realdir = os.path.abspath(os.path.dirname(realfile))
            path.insert(2, realdir)
        if chgbindir != self._bindir:
            path.insert(1, chgbindir)
        if self._testdir != runtestdir:
            path = [self._testdir] + path
        if self._tmpbindir != self._bindir:
            path = [self._tmpbindir] + path
        osenvironb[b"PATH"] = sepb.join(path)
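        # The resulting search order is therefore roughly: temporary bin dir,
        # test dir, hg bin dir, chg dir, run-tests dir, then the inherited
        # PATH, so freshly built binaries shadow any system-wide ones.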

        # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
        # can run .../tests/run-tests.py test-foo where test-foo
        # adds an extension to HGRC. Also include the run-tests.py directory
        # to import modules like heredoctest.
        pypath = [self._pythondir, self._testdir, runtestdir]
        # We have to augment PYTHONPATH, rather than simply replacing
        # it, in case external libraries are only available via current
        # PYTHONPATH.  (In particular, the Subversion bindings on OS X
        # are in /opt/subversion.)
        oldpypath = osenvironb.get(IMPL_PATH)
        if oldpypath:
            pypath.append(oldpypath)
        osenvironb[IMPL_PATH] = sepb.join(pypath)

        if self.options.pure:
            os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
            os.environ["HGMODULEPOLICY"] = "py"

        if self.options.allow_slow_tests:
            os.environ["HGTEST_SLOW"] = "slow"
        elif 'HGTEST_SLOW' in os.environ:
            del os.environ['HGTEST_SLOW']

        self._coveragefile = os.path.join(self._testdir, b'.coverage')

        if self.options.exceptions:
            exceptionsdir = os.path.join(self._outputdir, b'exceptions')
            try:
                os.makedirs(exceptionsdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            # Remove all existing exception reports.
            for f in os.listdir(exceptionsdir):
                os.unlink(os.path.join(exceptionsdir, f))

            osenvironb[b'HGEXCEPTIONSDIR'] = exceptionsdir
            logexceptions = os.path.join(self._testdir, b'logexceptions.py')
            self.options.extra_config_opt.append(
                'extensions.logexceptions=%s' % logexceptions.decode('utf-8')
            )

        vlog("# Using TESTDIR", _strpath(self._testdir))
        vlog("# Using RUNTESTDIR", _strpath(osenvironb[b'RUNTESTDIR']))
        vlog("# Using HGTMP", _strpath(self._hgtmp))
        vlog("# Using PATH", os.environ["PATH"])
        vlog(
            "# Using", _strpath(IMPL_PATH), _strpath(osenvironb[IMPL_PATH]),
        )
        vlog("# Writing to directory", _strpath(self._outputdir))

        try:
            return self._runtests(testdescs) or 0
        finally:
            time.sleep(0.1)
            self._cleanup()

    def findtests(self, args):
        """Finds possible test files from arguments.

        If you wish to inject custom tests into the test harness, this would
        be a good function to monkeypatch or override in a derived class.
        """
        if not args:
            if self.options.changed:
                proc = Popen4(
                    b'hg st --rev "%s" -man0 .'
                    % _bytespath(self.options.changed),
                    None,
                    0,
                )
                stdout, stderr = proc.communicate()
                args = stdout.strip(b'\0').split(b'\0')
            else:
                args = os.listdir(b'.')

        expanded_args = []
        for arg in args:
            if os.path.isdir(arg):
                if not arg.endswith(b'/'):
                    arg += b'/'
                expanded_args.extend([arg + a for a in os.listdir(arg)])
            else:
                expanded_args.append(arg)
        args = expanded_args

        testcasepattern = re.compile(
            br'([\w-]+\.t|py)(?:#([a-zA-Z0-9_\-\.#]+))'
        )
        tests = []
        for t in args:
            case = []

            if not (
                os.path.basename(t).startswith(b'test-')
                and (t.endswith(b'.py') or t.endswith(b'.t'))
            ):

                m = testcasepattern.match(os.path.basename(t))
                if m is not None:
                    t_basename, casestr = m.groups()
                    t = os.path.join(os.path.dirname(t), t_basename)
                    if casestr:
                        case = casestr.split(b'#')
                else:
                    continue

            if t.endswith(b'.t'):
                # a .t file may contain multiple test cases
                casedimensions = parsettestcases(t)
                if casedimensions:
                    cases = []

                    def addcases(case, casedimensions):
                        if not casedimensions:
                            cases.append(case)
                        else:
                            for c in casedimensions[0]:
                                addcases(case + [c], casedimensions[1:])

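                    # addcases expands the dimensions into their full
                    # cartesian product; e.g. (illustrative) dimensions
                    # [[b'a', b'b'], [b'x', b'y']] yield the four cases
                    # [b'a', b'x'], [b'a', b'y'], [b'b', b'x'], [b'b', b'y'].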
                    addcases([], casedimensions)
                    if case and case in cases:
                        cases = [case]
                    elif case:
                        # Ignore invalid cases
                        cases = []
                    else:
                        pass
                    tests += [{'path': t, 'case': c} for c in sorted(cases)]
                else:
                    tests.append({'path': t})
            else:
                tests.append({'path': t})
        return tests

    def _runtests(self, testdescs):
        def _reloadtest(test, i):
            # convert a test back to its description dict
            desc = {'path': test.path}
            case = getattr(test, '_case', [])
            if case:
                desc['case'] = case
            return self._gettest(desc, i)

        try:
            if self.options.restart:
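                # With --restart, skip ahead to the first test that left a
                # .err file behind on the previous run; if no .err file is
                # found, fall back to running the full list.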
                orig = list(testdescs)
                while testdescs:
                    desc = testdescs[0]
                    # desc['path'] is a relative path
                    if 'case' in desc:
                        casestr = b'#'.join(desc['case'])
                        errpath = b'%s#%s.err' % (desc['path'], casestr)
                    else:
                        errpath = b'%s.err' % desc['path']
                    errpath = os.path.join(self._outputdir, errpath)
                    if os.path.exists(errpath):
                        break
                    testdescs.pop(0)
                if not testdescs:
                    print("running all tests")
                    testdescs = orig

            tests = [self._gettest(d, i) for i, d in enumerate(testdescs)]
            num_tests = len(tests) * self.options.runs_per_test

            jobs = min(num_tests, self.options.jobs)

            failed = False
            kws = self.options.keywords
            if kws is not None and PYTHON3:
                kws = kws.encode('utf-8')

            suite = TestSuite(
                self._testdir,
                jobs=jobs,
                whitelist=self.options.whitelisted,
                blacklist=self.options.blacklist,
                retest=self.options.retest,
                keywords=kws,
                loop=self.options.loop,
                runs_per_test=self.options.runs_per_test,
                showchannels=self.options.showchannels,
                tests=tests,
                loadtest=_reloadtest,
            )
            verbosity = 1
            if self.options.list_tests:
                verbosity = 0
            elif self.options.verbose:
                verbosity = 2
            runner = TextTestRunner(self, verbosity=verbosity)

            if self.options.list_tests:
                result = runner.listtests(suite)
            else:
                if self._installdir:
                    self._installhg()
                    self._checkhglib("Testing")
                else:
                    self._usecorrectpython()
                if self.options.chg:
                    assert self._installdir
                    self._installchg()

                log(
                    'running %d tests using %d parallel processes'
                    % (num_tests, jobs)
                )

                result = runner.run(suite)

            if result.failures or result.errors:
                failed = True

            result.onEnd()

            if self.options.anycoverage:
                self._outputcoverage()
        except KeyboardInterrupt:
            failed = True
            print("\ninterrupted!")

        if failed:
            return 1

    def _getport(self, count):
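        # Each test slot is handed a block of three consecutive ports
        # (exposed to tests as HGPORT, HGPORT1 and HGPORT2). The offset only
        # ever advances, so a block that probed as busy is never reused.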
        port = self._ports.get(count)  # do we have a cached entry?
        if port is None:
            portneeded = 3
            # after 100 tries we just give up and let the test report failure
            for tries in xrange(100):
                allfree = True
                port = self.options.port + self._portoffset
                for idx in xrange(portneeded):
                    if not checkportisavailable(port + idx):
                        allfree = False
                        break
                self._portoffset += portneeded
                if allfree:
                    break
            self._ports[count] = port
        return port

    def _gettest(self, testdesc, count):
        """Obtain a Test by looking at its filename.

        Returns a Test instance. The Test may not be runnable if it doesn't
        map to a known type.
        """
        path = testdesc['path']
        lctest = path.lower()
        testcls = Test

        for ext, cls in self.TESTTYPES:
            if lctest.endswith(ext):
                testcls = cls
                break

        refpath = os.path.join(getcwdb(), path)
        tmpdir = os.path.join(self._hgtmp, b'child%d' % count)

        # extra keyword parameters. 'case' is used by .t tests
        kwds = dict((k, testdesc[k]) for k in ['case'] if k in testdesc)

        t = testcls(
            refpath,
            self._outputdir,
            tmpdir,
            keeptmpdir=self.options.keep_tmpdir,
            debug=self.options.debug,
            first=self.options.first,
            timeout=self.options.timeout,
            startport=self._getport(count),
            extraconfigopts=self.options.extra_config_opt,
            py3warnings=self.options.py3_warnings,
            shell=self.options.shell,
            hgcommand=self._hgcommand,
            usechg=bool(self.options.with_chg or self.options.chg),
            useipv6=useipv6,
            **kwds
        )
        t.should_reload = True
        return t

    def _cleanup(self):
        """Clean up state from this test invocation."""
        if self.options.keep_tmpdir:
            return

        vlog("# Cleaning up HGTMP", _strpath(self._hgtmp))
        shutil.rmtree(self._hgtmp, True)
        for f in self._createdfiles:
            try:
                os.remove(f)
            except OSError:
                pass

    def _usecorrectpython(self):
        """Configure the environment to use the appropriate Python in tests."""
        # Tests must use the same interpreter as us or bad things will happen.
        pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
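        # Two strategies follow: where symlinks work, link the expected
        # python name in the temporary bin dir to the running interpreter;
        # otherwise reorder PATH so the interpreter's directory is searched
        # first.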

        # os.symlink() exists with py3 on Windows, but it requires
        # Administrator rights.
        if getattr(os, 'symlink', None) and os.name != 'nt':
            vlog(
                "# Making python executable in test path a symlink to '%s'"
                % sysexecutable
            )
            mypython = os.path.join(self._tmpbindir, pyexename)
            try:
                if os.readlink(mypython) == sysexecutable:
                    return
                os.unlink(mypython)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise
            if self._findprogram(pyexename) != sysexecutable:
                try:
                    os.symlink(sysexecutable, mypython)
                    self._createdfiles.append(mypython)
                except OSError as err:
                    # child processes may race, which is harmless
                    if err.errno != errno.EEXIST:
                        raise
        else:
            exedir, exename = os.path.split(sysexecutable)
            vlog(
                "# Modifying search path to find %s as %s in '%s'"
                % (exename, pyexename, exedir)
            )
            path = os.environ['PATH'].split(os.pathsep)
            while exedir in path:
                path.remove(exedir)
            os.environ['PATH'] = os.pathsep.join([exedir] + path)
            if not self._findprogram(pyexename):
                print("WARNING: Cannot find %s in search path" % pyexename)

    def _installhg(self):
        """Install hg into the test environment.

        This will also configure hg with the appropriate testing settings.
        """
        vlog("# Performing temporary installation of HG")
        installerrs = os.path.join(self._hgtmp, b"install.err")
        compiler = ''
        if self.options.compiler:
            compiler = '--compiler ' + self.options.compiler
        if self.options.pure:
            pure = b"--pure"
        else:
            pure = b""

        # Run installer in hg root
        script = os.path.realpath(sys.argv[0])
        exe = sysexecutable
        if PYTHON3:
            compiler = _bytespath(compiler)
            script = _bytespath(script)
            exe = _bytespath(exe)
        hgroot = os.path.dirname(os.path.dirname(script))
        self._hgroot = hgroot
        os.chdir(hgroot)
        nohome = b'--home=""'
        if os.name == 'nt':
            # The --home="" trick works only on OSes where os.sep == '/'
            # because of a distutils convert_path() fast-path. Avoid it at
            # least on Windows for now, and deal with .pydistutils.cfg bugs
            # when they happen.
            nohome = b''
        cmd = (
            b'"%(exe)s" setup.py %(pure)s clean --all'
            b' build %(compiler)s --build-base="%(base)s"'
            b' install --force --prefix="%(prefix)s"'
            b' --install-lib="%(libdir)s"'
            b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
            % {
                b'exe': exe,
                b'pure': pure,
                b'compiler': compiler,
                b'base': os.path.join(self._hgtmp, b"build"),
                b'prefix': self._installdir,
                b'libdir': self._pythondir,
                b'bindir': self._bindir,
                b'nohome': nohome,
                b'logfile': installerrs,
            }
        )
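        # i.e. one shell line that cleans, builds and installs hg into the
        # scratch prefix, with all output captured in install.err for
        # inspection on failure.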

        # setuptools requires install directories to exist.
        def makedirs(p):
            try:
                os.makedirs(p)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

        makedirs(self._pythondir)
        makedirs(self._bindir)

        vlog("# Running", cmd.decode("utf-8"))
        if subprocess.call(_strpath(cmd), shell=True) == 0:
            if not self.options.verbose:
                try:
                    os.remove(installerrs)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
        else:
            with open(installerrs, 'rb') as f:
                for line in f:
                    if PYTHON3:
                        sys.stdout.buffer.write(line)
                    else:
                        sys.stdout.write(line)
            sys.exit(1)
        os.chdir(self._testdir)

        self._usecorrectpython()

        if self.options.py3_warnings and not self.options.anycoverage:
            vlog("# Updating hg command to enable Py3k Warnings switch")
            with open(os.path.join(self._bindir, 'hg'), 'rb') as f:
                lines = [line.rstrip() for line in f]
                lines[0] += ' -3'
            with open(os.path.join(self._bindir, 'hg'), 'wb') as f:
                for line in lines:
                    f.write(line + '\n')

        hgbat = os.path.join(self._bindir, b'hg.bat')
        if os.path.isfile(hgbat):
            # hg.bat expects to be put in bin/scripts while the run-tests.py
            # installation layout puts it in bin/ directly. Fix it up.
            with open(hgbat, 'rb') as f:
                data = f.read()
            if br'"%~dp0..\python" "%~dp0hg" %*' in data:
                data = data.replace(
                    br'"%~dp0..\python" "%~dp0hg" %*',
                    b'"%~dp0python" "%~dp0hg" %*',
                )
                with open(hgbat, 'wb') as f:
                    f.write(data)
            else:
                print('WARNING: cannot fix hg.bat reference to python.exe')

        if self.options.anycoverage:
            custom = os.path.join(
                osenvironb[b'RUNTESTDIR'], b'sitecustomize.py'
            )
            target = os.path.join(self._pythondir, b'sitecustomize.py')
            vlog('# Installing coverage trigger to %s' % target)
            shutil.copyfile(custom, target)
            rc = os.path.join(self._testdir, b'.coveragerc')
            vlog('# Installing coverage rc to %s' % rc)
            osenvironb[b'COVERAGE_PROCESS_START'] = rc
            covdir = os.path.join(self._installdir, b'..', b'coverage')
            try:
                os.mkdir(covdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            osenvironb[b'COVERAGE_DIR'] = covdir

    def _checkhglib(self, verb):
        """Ensure that the 'mercurial' package imported by python is
        the one we expect it to be.  If not, print a warning to stderr."""
        if (self._bindir == self._pythondir) and (
            self._bindir != self._tmpbindir
        ):
            # The pythondir has been inferred from the --with-hg flag.
            # We cannot expect anything sensible here.
            return
        expecthg = os.path.join(self._pythondir, b'mercurial')
        actualhg = self._gethgpath()
        if os.path.abspath(actualhg) != os.path.abspath(expecthg):
            sys.stderr.write(
                'warning: %s with unexpected mercurial lib: %s\n'
                ' (expected %s)\n' % (verb, actualhg, expecthg)
            )

    def _gethgpath(self):
        """Return the path to the mercurial package that is actually found by
        the current Python interpreter."""
        if self._hgpath is not None:
            return self._hgpath

        cmd = b'"%s" -c "import mercurial; print (mercurial.__path__[0])"'
        cmd = cmd % PYTHON
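        # Asking a fresh child interpreter, rather than importing mercurial
        # here, reports the path a spawned test process would actually see.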
        if PYTHON3:
            cmd = _strpath(cmd)

        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        out, err = p.communicate()

        self._hgpath = out.strip()

        return self._hgpath

    def _installchg(self):
        """Install chg into the test environment"""
        vlog('# Performing temporary installation of CHG')
        assert os.path.dirname(self._bindir) == self._installdir
        assert self._hgroot, 'must be called after _installhg()'
        cmd = b'"%(make)s" clean install PREFIX="%(prefix)s"' % {
            b'make': b'make',  # TODO: switch by option or environment?
            b'prefix': self._installdir,
        }
        cwd = os.path.join(self._hgroot, b'contrib', b'chg')
        vlog("# Running", cmd)
        proc = subprocess.Popen(
            cmd,
            shell=True,
            cwd=cwd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        out, _err = proc.communicate()
        if proc.returncode != 0:
            if PYTHON3:
                sys.stdout.buffer.write(out)
            else:
                sys.stdout.write(out)
            sys.exit(1)

    def _outputcoverage(self):
        """Produce code coverage output."""
        import coverage

        coverage = coverage.coverage

        vlog('# Producing coverage report')
        # chdir is the easiest way to get short, relative paths in the
        # output.
        os.chdir(self._hgroot)
        covdir = os.path.join(_strpath(self._installdir), '..', 'coverage')
        cov = coverage(data_file=os.path.join(covdir, 'cov'))

        # Map install directory paths back to source directory.
        cov.config.paths['srcdir'] = ['.', _strpath(self._pythondir)]

        cov.combine()

        omit = [
            _strpath(os.path.join(x, b'*'))
            for x in [self._bindir, self._testdir]
        ]
        cov.report(ignore_errors=True, omit=omit)

        if self.options.htmlcov:
            htmldir = os.path.join(_strpath(self._outputdir), 'htmlcov')
            cov.html_report(directory=htmldir, omit=omit)
        if self.options.annotate:
            adir = os.path.join(_strpath(self._outputdir), 'annotated')
            if not os.path.isdir(adir):
                os.mkdir(adir)
            cov.annotate(directory=adir, omit=omit)

    def _findprogram(self, program):
        """Search PATH for an executable program"""
        dpb = _bytespath(os.defpath)
        sepb = _bytespath(os.pathsep)
        for p in osenvironb.get(b'PATH', dpb).split(sepb):
            name = os.path.join(p, program)
            if os.name == 'nt' or os.access(name, os.X_OK):
                return name
        return None

    def _checktools(self):
        """Ensure tools required to run tests are present."""
        for p in self.REQUIREDTOOLS:
            if os.name == 'nt' and not p.endswith(b'.exe'):
                p += b'.exe'
            found = self._findprogram(p)
            p = p.decode("utf-8")
            if found:
                vlog("# Found prerequisite", p, "at", _strpath(found))
            else:
                print("WARNING: Did not find prerequisite tool: %s " % p)


def aggregateexceptions(path):
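    # Each file under 'path' is expected to hold one NUL-separated record of
    # five fields, as written by the logexceptions test extension:
    #
    #     exc\0mainframe\0hgframe\0hgline\0testname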
    exceptioncounts = collections.Counter()
    testsbyfailure = collections.defaultdict(set)
    failuresbytest = collections.defaultdict(set)

    for f in os.listdir(path):
        with open(os.path.join(path, f), 'rb') as fh:
            data = fh.read().split(b'\0')
            if len(data) != 5:
                continue

            exc, mainframe, hgframe, hgline, testname = data
            exc = exc.decode('utf-8')
            mainframe = mainframe.decode('utf-8')
            hgframe = hgframe.decode('utf-8')
            hgline = hgline.decode('utf-8')
            testname = testname.decode('utf-8')

            key = (hgframe, hgline, exc)
            exceptioncounts[key] += 1
            testsbyfailure[key].add(testname)
            failuresbytest[testname].add(key)

    # Find the test having the fewest failures for each failure.
    leastfailing = {}
    for key, tests in testsbyfailure.items():
        fewesttest = None
        fewestcount = 99999999
        for test in sorted(tests):
            if len(failuresbytest[test]) < fewestcount:
                fewesttest = test
                fewestcount = len(failuresbytest[test])

        leastfailing[key] = (fewestcount, fewesttest)

    # Create a combined counter so we can sort by total occurrences and
    # impacted tests.
    combined = {}
    for key in exceptioncounts:
        combined[key] = (
            exceptioncounts[key],
            len(testsbyfailure[key]),
            leastfailing[key][0],
            leastfailing[key][1],
        )

    return {
        'exceptioncounts': exceptioncounts,
        'total': sum(exceptioncounts.values()),
        'combined': combined,
        'leastfailing': leastfailing,
        'byfailure': testsbyfailure,
        'bytest': failuresbytest,
    }


if __name__ == '__main__':
    runner = TestRunner()

    try:
        import msvcrt

        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
        msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
    except ImportError:
        pass
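    # On Windows, forcing the standard streams into O_BINARY mode keeps the
    # C runtime from translating '\n' into '\r\n' in test output; elsewhere
    # msvcrt is simply absent and the ImportError is ignored.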

    sys.exit(runner.run(sys.argv[1:]))