##// END OF EJS Templates
treemanifests: fix streaming clone...
Martin von Zweigbergk -
r28007:fb92927f default
parent child Browse files
Show More
@@ -1,304 +1,314 b''
1 # repair.py - functions for repository repair for mercurial
1 # repair.py - functions for repository repair for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 # Copyright 2007 Matt Mackall
4 # Copyright 2007 Matt Mackall
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import short
14 from .node import short
15 from . import (
15 from . import (
16 bundle2,
16 bundle2,
17 changegroup,
17 changegroup,
18 error,
18 error,
19 exchange,
19 exchange,
20 util,
20 util,
21 )
21 )
22
22
def _bundle(repo, bases, heads, node, suffix, compress=True):
    """Create a backup bundle of the given revisions.

    The bundle contains the changesets reachable as ``bases::heads`` and is
    written under ``.hg/strip-backup/``.  The filename embeds the short hash
    of *node* and the first 8 hex digits of a hash over all bundled
    changesets, so distinct strips never collide.

    Returns the bundle path relative to ``repo.vfs`` (callers display it via
    ``vfs.join``).
    """
    cgversion = changegroup.safeversion(repo)

    cg = changegroup.changegroupsubset(repo, bases, heads, 'strip',
                                       version=cgversion)
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the nodes in the filename for uniqueness
    allcommits = repo.set('%ln::%ln', bases, heads)
    allhashes = sorted(c.hex() for c in allcommits)
    totalhash = util.sha1(''.join(allhashes)).hexdigest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix)

    # Pick the on-disk bundle format: HG20 for modern changegroup versions,
    # HG10BZ/HG10UN for the legacy '01' changegroup.
    comp = None
    if cgversion != '01':
        bundletype = "HG20"
        if compress:
            comp = 'BZ'
    elif compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs,
                                   compression=comp)
51
51
def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip

    Returns the sorted list of every file touched by any changeset with
    revision number >= *striprev*.
    """
    files = set()

    for x in xrange(striprev, len(repo)):
        files.update(repo[x].files())

    return sorted(files)
60
60
def _collectbrokencsets(repo, files, striprev):
    """return the changesets which will be broken by the truncation

    A changeset is "broken" when the manifest, or the filelog of one of
    *files*, must be truncated at *striprev* in a way that removes revisions
    still linked from changesets below the strip point.
    """
    s = set()
    def collectone(revlog):
        # getstrippoint reports which revlog revisions would be lost; map
        # them back to the changelog revisions that reference them.
        _, brokenset = revlog.getstrippoint(striprev)
        s.update([revlog.linkrev(r) for r in brokenset])

    collectone(repo.manifest)
    for fname in files:
        collectone(repo.file(fname))

    return s
73
73
def strip(ui, repo, nodelist, backup=True, topic='backup'):
    """Strip the given nodes and all their descendants from the repository.

    Revisions above the strip point that are *not* descendants of the
    stripped nodes are saved to a temporary bundle and re-applied after the
    truncation.  When *backup* is true, a full backup bundle of everything
    removed is written first (named with *topic* as suffix).
    """
    # This function operates within a transaction of its own, but does
    # not take any lock on the repo.
    # Simple way to maintain backwards compatibility for this
    # argument.
    if backup in ['none', 'strip']:
        backup = False

    repo = repo.unfiltered()
    repo.destroying()

    cl = repo.changelog
    # TODO handle undo of merge sets
    if isinstance(nodelist, str):
        nodelist = [nodelist]
    striplist = [cl.rev(node) for node in nodelist]
    striprev = min(striplist)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = set(striplist)
    for rev in striplist:
        for desc in cl.descendants([rev]):
            tostrip.add(desc)

    files = _collectfiles(repo, striprev)
    saverevs = _collectbrokencsets(repo, files, striprev)

    # compute heads of the set of revisions to preserve
    saveheads = set(saverevs)
    for r in xrange(striprev + 1, len(cl)):
        if r not in tostrip:
            saverevs.add(r)
            saveheads.difference_update(cl.parentrevs(r))
            saveheads.add(r)
    saveheads = [cl.node(r) for r in saveheads]

    # compute base nodes
    if saverevs:
        descendants = set(cl.descendants(saverevs))
        saverevs.difference_update(descendants)
    savebases = [cl.node(r) for r in saverevs]
    stripbases = [cl.node(r) for r in tostrip]

    # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), but
    # is much faster
    newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip)
    if newbmtarget:
        newbmtarget = repo[newbmtarget.first()].node()
    else:
        newbmtarget = '.'

    # bookmarks pointing into the stripped set will be moved to newbmtarget
    bm = repo._bookmarks
    updatebm = []
    for m in bm:
        rev = repo[bm[m]].rev()
        if rev in tostrip:
            updatebm.append(m)

    # create a changegroup for all the branches we need to keep
    backupfile = None
    vfs = repo.vfs
    node = nodelist[-1]
    if backup:
        backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
        repo.ui.status(_("saved backup bundle to %s\n") %
                       vfs.join(backupfile))
        repo.ui.log("backupbundle", "saved backup bundle to %s\n",
                    vfs.join(backupfile))
    if saveheads or savebases:
        # do not compress partial bundle if we remove it from disk later
        chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
                            compress=False)

    mfst = repo.manifest

    curtr = repo.currenttransaction()
    if curtr is not None:
        del curtr # avoid carrying reference to transaction for nothing
        msg = _('programming error: cannot strip from inside a transaction')
        raise error.Abort(msg, hint=_('contact your extension maintainer'))

    try:
        with repo.transaction("strip") as tr:
            offset = len(tr.entries)

            # truncate the changelog, manifest and affected filelogs
            tr.startgroup()
            cl.strip(striprev, tr)
            mfst.strip(striprev, tr)
            for fn in files:
                repo.file(fn).strip(striprev, tr)
            tr.endgroup()

            for i in xrange(offset, len(tr.entries)):
                fpath, troffset, ignore = tr.entries[i]
                repo.svfs(fpath, 'a').truncate(troffset)
                if troffset == 0:
                    repo.store.markremoved(fpath)

        if saveheads or savebases:
            # re-apply the revisions we saved before truncating
            ui.note(_("adding branch\n"))
            f = vfs.open(chgrpfile, "rb")
            gen = exchange.readbundle(ui, f, chgrpfile, vfs)
            if not repo.ui.verbose:
                # silence internal shuffling chatter
                repo.ui.pushbuffer()
            if isinstance(gen, bundle2.unbundle20):
                with repo.transaction('strip') as tr:
                    tr.hookargs = {'source': 'strip',
                                   'url': 'bundle:' + vfs.join(chgrpfile)}
                    bundle2.applybundle(repo, gen, tr, source='strip',
                                        url='bundle:' + vfs.join(chgrpfile))
            else:
                gen.apply(repo, 'strip', 'bundle:' + vfs.join(chgrpfile), True)
            if not repo.ui.verbose:
                repo.ui.popbuffer()
            f.close()

        for m in updatebm:
            bm[m] = repo[newbmtarget].node()
        lock = tr = None
        try:
            lock = repo.lock()
            tr = repo.transaction('repair')
            bm.recordchange(tr)
            tr.close()
        finally:
            # NOTE(review): if repo.lock() raises, tr is still None here and
            # tr.release() will raise AttributeError — confirm whether a
            # None-guard (or lockmod.release) is wanted upstream.
            tr.release()
            lock.release()

        # remove undo files
        for undovfs, undofile in repo.undofiles():
            try:
                undovfs.unlink(undofile)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    ui.warn(_('error removing %s: %s\n') %
                            (undovfs.join(undofile), str(e)))

    except: # re-raises
        if backupfile:
            ui.warn(_("strip failed, full bundle stored in '%s'\n")
                    % vfs.join(backupfile))
        elif saveheads:
            ui.warn(_("strip failed, partial bundle stored in '%s'\n")
                    % vfs.join(chgrpfile))
        raise
    else:
        if saveheads or savebases:
            # Remove partial backup only if there were no exceptions
            vfs.unlink(chgrpfile)

    repo.destroyed()
232
232
def rebuildfncache(ui, repo):
    """Rebuilds the fncache file from repo history.

    Missing entries will be added. Extra entries will be removed.
    """
    repo = repo.unfiltered()

    if 'fncache' not in repo.requirements:
        ui.warn(_('(not rebuilding fncache because repository does not '
                  'support fncache)\n'))
        return

    with repo.lock():
        fnc = repo.store.fncache
        # Trigger load of fncache.
        if 'irrelevant' in fnc:
            pass

        oldentries = set(fnc.entries)
        newentries = set()
        seenfiles = set()

        repolen = len(repo)
        for rev in repo:
            ui.progress(_('changeset'), rev, total=repolen)

            ctx = repo[rev]
            for f in ctx.files():
                # This is to minimize I/O.
                if f in seenfiles:
                    continue
                seenfiles.add(f)

                i = 'data/%s.i' % f
                d = 'data/%s.d' % f

                if repo.store._exists(i):
                    newentries.add(i)
                if repo.store._exists(d):
                    newentries.add(d)

        ui.progress(_('changeset'), None)

        # Fix: test membership in repo.requirements, consistent with the
        # 'fncache' requirements check above — not in repo itself.
        if 'treemanifest' in repo.requirements: # safe but unnecessary otherwise
            # also account for the per-directory manifest revlogs
            for dirname in util.dirs(seenfiles):
                i = 'meta/%s/00manifest.i' % dirname
                d = 'meta/%s/00manifest.d' % dirname

                if repo.store._exists(i):
                    newentries.add(i)
                if repo.store._exists(d):
                    newentries.add(d)

        addcount = len(newentries - oldentries)
        removecount = len(oldentries - newentries)
        for p in sorted(oldentries - newentries):
            ui.write(_('removing %s\n') % p)
        for p in sorted(newentries - oldentries):
            ui.write(_('adding %s\n') % p)

        if addcount or removecount:
            ui.write(_('%d items added, %d removed from fncache\n') %
                     (addcount, removecount))
            fnc.entries = newentries
            fnc._dirty = True

            with repo.transaction('fncache') as tr:
                fnc.write(tr)
        else:
            ui.write(_('fncache already up to date\n'))
293
303
def stripbmrevset(repo, mark):
    """
    The revset to strip when strip is called with -B mark

    Needs to live here so extensions can use it and wrap it even when strip is
    not enabled or not present on a box.
    """
    # ancestors of the bookmark, minus everything reachable from other
    # heads or other bookmarks
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
@@ -1,552 +1,553 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import stat
12 import stat
13
13
14 from .i18n import _
14 from .i18n import _
15 from . import (
15 from . import (
16 error,
16 error,
17 parsers,
17 parsers,
18 scmutil,
18 scmutil,
19 util,
19 util,
20 )
20 )
21
21
22 _sha = util.sha1
22 _sha = util.sha1
23
23
24 # This avoids a collision between a file named foo and a dir named
24 # This avoids a collision between a file named foo and a dir named
25 # foo.i or foo.d
25 # foo.i or foo.d
26 def _encodedir(path):
26 def _encodedir(path):
27 '''
27 '''
28 >>> _encodedir('data/foo.i')
28 >>> _encodedir('data/foo.i')
29 'data/foo.i'
29 'data/foo.i'
30 >>> _encodedir('data/foo.i/bla.i')
30 >>> _encodedir('data/foo.i/bla.i')
31 'data/foo.i.hg/bla.i'
31 'data/foo.i.hg/bla.i'
32 >>> _encodedir('data/foo.i.hg/bla.i')
32 >>> _encodedir('data/foo.i.hg/bla.i')
33 'data/foo.i.hg.hg/bla.i'
33 'data/foo.i.hg.hg/bla.i'
34 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
34 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
35 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
35 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
36 '''
36 '''
37 return (path
37 return (path
38 .replace(".hg/", ".hg.hg/")
38 .replace(".hg/", ".hg.hg/")
39 .replace(".i/", ".i.hg/")
39 .replace(".i/", ".i.hg/")
40 .replace(".d/", ".d.hg/"))
40 .replace(".d/", ".d.hg/"))
41
41
42 encodedir = getattr(parsers, 'encodedir', _encodedir)
42 encodedir = getattr(parsers, 'encodedir', _encodedir)
43
43
def decodedir(path):
    '''
    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    # Inverse of encodedir: strip one '.hg' suffix from each directory
    # component.  Fast path: untouched paths contain no '.hg/'.
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))
59
59
def _buildencodefun():
    '''
    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    winreserved = [ord(x) for x in '\\:*?"<>|']
    # identity map for plain ASCII, then override the special cases
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    # control chars, high bytes and Windows-reserved chars -> '~xx' hex
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    # uppercase letters (and the escape char itself) -> '_' + lowercase
    for x in range(ord("A"), ord("Z") + 1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        # greedily match 1- to 3-character encoded sequences
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[c] for c in s]),
            lambda s: ''.join(list(decode(s))))
108
108
109 _encodefname, _decodefname = _buildencodefun()
109 _encodefname, _decodefname = _buildencodefun()
110
110
def encodefilename(s):
    '''
    >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    # directory-collision encoding first, then per-character encoding
    return _encodefname(encodedir(s))
117
117
def decodefilename(s):
    '''
    >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    # exact inverse of encodefilename: undo per-character encoding, then
    # the directory-collision encoding
    return decodedir(_decodefname(s))
124
124
def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f('nothing/special.txt')
    'nothing/special.txt'
    >>> f('HELLO')
    'hello'
    >>> f('hello:world?')
    'hello~3aworld~3f'
    >>> f('the\x07quick\xADshot')
    'the~07quick~adshot'
    '''
    winreserved = [ord(x) for x in '\\:*?"<>|']
    # identity map for plain ASCII, then override the special cases
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    # control chars, high bytes and Windows-reserved chars -> '~xx' hex
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    # unlike _buildencodefun, uppercase is simply lowercased (lossy)
    for x in range(ord("A"), ord("Z") + 1):
        cmap[chr(x)] = chr(x).lower()
    return lambda s: "".join([cmap[c] for c in s])
144
144
145 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
145 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
146
146
147 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
147 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
148 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
148 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
149 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
149 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
150 def _auxencode(path, dotencode):
150 def _auxencode(path, dotencode):
151 '''
151 '''
152 Encodes filenames containing names reserved by Windows or which end in
152 Encodes filenames containing names reserved by Windows or which end in
153 period or space. Does not touch other single reserved characters c.
153 period or space. Does not touch other single reserved characters c.
154 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
154 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
155 Additionally encodes space or period at the beginning, if dotencode is
155 Additionally encodes space or period at the beginning, if dotencode is
156 True. Parameter path is assumed to be all lowercase.
156 True. Parameter path is assumed to be all lowercase.
157 A segment only needs encoding if a reserved name appears as a
157 A segment only needs encoding if a reserved name appears as a
158 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
158 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
159 doesn't need encoding.
159 doesn't need encoding.
160
160
161 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
161 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
162 >>> _auxencode(s.split('/'), True)
162 >>> _auxencode(s.split('/'), True)
163 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
163 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
164 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
164 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
165 >>> _auxencode(s.split('/'), False)
165 >>> _auxencode(s.split('/'), False)
166 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
166 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
167 >>> _auxencode(['foo. '], True)
167 >>> _auxencode(['foo. '], True)
168 ['foo.~20']
168 ['foo.~20']
169 >>> _auxencode([' .foo'], True)
169 >>> _auxencode([' .foo'], True)
170 ['~20.foo']
170 ['~20.foo']
171 '''
171 '''
172 for i, n in enumerate(path):
172 for i, n in enumerate(path):
173 if not n:
173 if not n:
174 continue
174 continue
175 if dotencode and n[0] in '. ':
175 if dotencode and n[0] in '. ':
176 n = "~%02x" % ord(n[0]) + n[1:]
176 n = "~%02x" % ord(n[0]) + n[1:]
177 path[i] = n
177 path[i] = n
178 else:
178 else:
179 l = n.find('.')
179 l = n.find('.')
180 if l == -1:
180 if l == -1:
181 l = len(n)
181 l = len(n)
182 if ((l == 3 and n[:3] in _winres3) or
182 if ((l == 3 and n[:3] in _winres3) or
183 (l == 4 and n[3] <= '9' and n[3] >= '1'
183 (l == 4 and n[3] <= '9' and n[3] >= '1'
184 and n[:3] in _winres4)):
184 and n[:3] in _winres4)):
185 # encode third letter ('aux' -> 'au~78')
185 # encode third letter ('aux' -> 'au~78')
186 ec = "~%02x" % ord(n[2])
186 ec = "~%02x" % ord(n[2])
187 n = n[0:2] + ec + n[3:]
187 n = n[0:2] + ec + n[3:]
188 path[i] = n
188 path[i] = n
189 if n[-1] in '. ':
189 if n[-1] in '. ':
190 # encode last period or space ('foo...' -> 'foo..~2e')
190 # encode last period or space ('foo...' -> 'foo..~2e')
191 path[i] = n[:-1] + "~%02x" % ord(n[-1])
191 path[i] = n[:-1] + "~%02x" % ord(n[-1])
192 return path
192 return path
193
193
_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4

def _hashencode(path, dotencode):
    """Hash-based fallback encoding for over-long store paths.

    Produces 'dh/' + shortened directory prefixes + as much of the basename
    as fits + sha1(path) + extension, capped at _maxstorepathlen.  Not
    reversible (the digest stands in for the full path).
    """
    digest = _sha(path).hexdigest()
    le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    # keep up to _dirprefixlen chars of each directory level, but stop
    # once the joined prefix would exceed _maxshortdirslen
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
            if t > _maxshortdirslen:
                break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if len(dirs) > 0:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        # pad with the start of the basename up to the length limit
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res
228
228
def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    ef = _encodefname(path).split('/')
    res = '/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        # too long even after encoding: fall back to the hashed form
        res = _hashencode(path, dotencode)
    return res
266
266
def _pathencode(path):
    '''pure-Python fallback for the C pathencode: apply the default
    (reversible) encoding with dot-encoding enabled, switching to the
    hashed encoding when either the raw or the encoded path would
    exceed _maxstorepathlen'''
    de = encodedir(path)
    if len(path) > _maxstorepathlen:
        return _hashencode(de, True)
    ef = _encodefname(de).split('/')
    res = '/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return _hashencode(de, True)
    return res
276
276
# Prefer the C implementation of pathencode when the parsers extension
# module provides one; otherwise keep the pure-Python version above.
_pathencode = getattr(parsers, 'pathencode', _pathencode)
278
278
def _plainhybridencode(f):
    '''hybrid encoding without dot-encoding (used when the repo lacks
    the 'dotencode' requirement)'''
    return _hybridencode(f, False)
281
281
def _calcmode(vfs):
    '''Return the permission mode to create files in .hg/ with, or None
    when the current umask already produces that mode (avoiding useless
    chmod calls) or the store directory cannot be stat'ed.'''
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0o777 & ~util.umask) == (0o777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode
292
292
293 _data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
293 _data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
294 ' phaseroots obsstore')
294 ' phaseroots obsstore')
295
295
class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        # paths are stored unencoded; only directory names are escaped
        self.vfs = scmutil.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        '''return the filesystem path of store file f'''
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size) for each revlog file
        (*.i / *.d) under relpath, sorted'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def datafiles(self):
        # walk 'meta/' too so tree-manifest revlogs are included
        # (e.g. for streaming clone)
        return self._walk('data', True) + self._walk('meta', True)

    def topfiles(self):
        # yield manifest before changelog
        return reversed(self._walk('', False))

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        for x in self.topfiles():
            yield x

    def copylist(self):
        '''files/directories to copy for a hardlink clone'''
        return ['requires'] + _data.split()

    def write(self, tr):
        # nothing to flush in the basic store
        pass

    def invalidatecaches(self):
        pass

    def markremoved(self, fn):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if self.vfs.exists(path + ".i"):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return self.vfs.exists(path)
369
369
class encodedstore(basicstore):
    '''store using the reversible filename encoding ('store' requirement
    without 'fncache')'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        # walk both data/ and meta/ via basicstore, decoding names;
        # undecodable names are reported as None
        for a, b, size in super(encodedstore, self).datafiles():
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])
394
394
class fncache(object):
    '''lazily-loaded set of tracked store filenames, persisted in the
    .hg/store/fncache file'''
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None   # set of filenames, or None until _load()
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            # an empty entry signals corruption; locate it for the message
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %d') % (n + 1)
                    raise error.Abort(t)
        fp.close()

    def write(self, tr):
        '''write entries back to disk if anything changed'''
        if self._dirty:
            tr.addbackup('fncache')
            fp = self.vfs('fncache', mode='wb', atomictemp=True)
            if self.entries:
                fp.write(encodedir('\n'.join(self.entries) + '\n'))
            fp.close()
            self._dirty = False

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def remove(self, fn):
        if self.entries is None:
            self._load()
        try:
            self.entries.remove(fn)
            self._dirty = True
        except KeyError:
            # removing an unknown entry is not an error
            pass

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)
455
455
class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs):
    '''vfs wrapper that records every revlog path opened for writing in
    the fncache, and encodes paths before handing them to the real vfs'''
    def __init__(self, vfs, fnc, encode):
        scmutil.auditvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode='r', *args, **kw):
        # track both file revlogs ('data/') and tree-manifest revlogs
        # ('meta/') so streaming clone sees every revlog
        if mode not in ('r', 'rb') and (path.startswith('data/') or
                                        path.startswith('meta/')):
            self.fncache.add(path)
        return self.vfs(self.encode(path), mode, *args, **kw)

    def join(self, path):
        if path:
            return self.vfs.join(self.encode(path))
        else:
            return self.vfs.join(path)
472
473
class fncachestore(basicstore):
    '''store using the fncache file plus the hybrid/hashed ('dh/')
    filename encoding'''
    def __init__(self, path, vfstype, dotencode):
        if dotencode:
            encode = _pathencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        '''yields (unencoded, encoded, size) for every fncache entry
        that exists on disk'''
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
            except OSError as err:
                # stale fncache entries (missing files) are skipped
                if err.errno != errno.ENOENT:
                    raise

    def copylist(self):
        d = ('data meta dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self, tr):
        self.fncache.write(tr)

    def invalidatecaches(self):
        self.fncache.entries = None

    def markremoved(self, fn):
        self.fncache.remove(fn)

    def _exists(self, f):
        '''check whether the encoded form of f exists on disk'''
        ef = self.encode(f)
        try:
            self.getsize(ef)
            return True
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # nonexistent entry
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # check for files (exact match)
        e = path + '.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        for e in self.fncache:
            if e.startswith(path) and self._exists(e):
                return True
        return False
546
547
def store(requirements, path, vfstype):
    '''instantiate the store implementation matching the repo
    requirements'''
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, vfstype, 'dotencode' in requirements)
        return encodedstore(path, vfstype)
    # legacy layout: store files live directly in .hg/
    return basicstore(path, vfstype)
@@ -1,384 +1,384 b''
1 # verify.py - repository integrity checking for Mercurial
1 # verify.py - repository integrity checking for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import os
10 import os
11
11
12 from .i18n import _
12 from .i18n import _
13 from .node import (
13 from .node import (
14 nullid,
14 nullid,
15 short,
15 short,
16 )
16 )
17
17
18 from . import (
18 from . import (
19 error,
19 error,
20 revlog,
20 revlog,
21 util,
21 util,
22 )
22 )
23
23
def verify(repo):
    '''check repo integrity while holding the repo lock; returns the
    verifier's exit status'''
    with repo.lock():
        return verifier(repo).verify()
27
27
28 def _normpath(f):
28 def _normpath(f):
29 # under hg < 2.4, convert didn't sanitize paths properly, so a
29 # under hg < 2.4, convert didn't sanitize paths properly, so a
30 # converted repo may contain repeated slashes
30 # converted repo may contain repeated slashes
31 while '//' in f:
31 while '//' in f:
32 f = f.replace('//', '/')
32 f = f.replace('//', '/')
33 return f
33 return f
34
34
35 def _validpath(repo, path):
35 def _validpath(repo, path):
36 """Returns False if a path should NOT be treated as part of a repo.
36 """Returns False if a path should NOT be treated as part of a repo.
37
37
38 For all in-core cases, this returns True, as we have no way for a
38 For all in-core cases, this returns True, as we have no way for a
39 path to be mentioned in the history but not actually be
39 path to be mentioned in the history but not actually be
40 relevant. For narrow clones, this is important because many
40 relevant. For narrow clones, this is important because many
41 filelogs will be missing, and changelog entries may mention
41 filelogs will be missing, and changelog entries may mention
42 modified files that are outside the narrow scope.
42 modified files that are outside the narrow scope.
43 """
43 """
44 return True
44 return True
45
45
46 class verifier(object):
46 class verifier(object):
47 def __init__(self, repo):
47 def __init__(self, repo):
48 self.repo = repo.unfiltered()
48 self.repo = repo.unfiltered()
49 self.ui = repo.ui
49 self.ui = repo.ui
50 self.badrevs = set()
50 self.badrevs = set()
51 self.errors = 0
51 self.errors = 0
52 self.warnings = 0
52 self.warnings = 0
53 self.havecl = len(repo.changelog) > 0
53 self.havecl = len(repo.changelog) > 0
54 self.havemf = len(repo.manifest) > 0
54 self.havemf = len(repo.manifest) > 0
55 self.revlogv1 = repo.changelog.version != revlog.REVLOGV0
55 self.revlogv1 = repo.changelog.version != revlog.REVLOGV0
56 self.lrugetctx = util.lrucachefunc(repo.changectx)
56 self.lrugetctx = util.lrucachefunc(repo.changectx)
57 self.refersmf = False
57 self.refersmf = False
58 self.fncachewarned = False
58 self.fncachewarned = False
59
59
60 def warn(self, msg):
60 def warn(self, msg):
61 self.ui.warn(msg + "\n")
61 self.ui.warn(msg + "\n")
62 self.warnings += 1
62 self.warnings += 1
63
63
64 def err(self, linkrev, msg, filename=None):
64 def err(self, linkrev, msg, filename=None):
65 if linkrev is not None:
65 if linkrev is not None:
66 self.badrevs.add(linkrev)
66 self.badrevs.add(linkrev)
67 else:
67 else:
68 linkrev = '?'
68 linkrev = '?'
69 msg = "%s: %s" % (linkrev, msg)
69 msg = "%s: %s" % (linkrev, msg)
70 if filename:
70 if filename:
71 msg = "%s@%s" % (filename, msg)
71 msg = "%s@%s" % (filename, msg)
72 self.ui.warn(" " + msg + "\n")
72 self.ui.warn(" " + msg + "\n")
73 self.errors += 1
73 self.errors += 1
74
74
75 def exc(self, linkrev, msg, inst, filename=None):
75 def exc(self, linkrev, msg, inst, filename=None):
76 if not str(inst):
76 if not str(inst):
77 inst = repr(inst)
77 inst = repr(inst)
78 self.err(linkrev, "%s: %s" % (msg, inst), filename)
78 self.err(linkrev, "%s: %s" % (msg, inst), filename)
79
79
80 def checklog(self, obj, name, linkrev):
80 def checklog(self, obj, name, linkrev):
81 if not len(obj) and (self.havecl or self.havemf):
81 if not len(obj) and (self.havecl or self.havemf):
82 self.err(linkrev, _("empty or missing %s") % name)
82 self.err(linkrev, _("empty or missing %s") % name)
83 return
83 return
84
84
85 d = obj.checksize()
85 d = obj.checksize()
86 if d[0]:
86 if d[0]:
87 self.err(None, _("data length off by %d bytes") % d[0], name)
87 self.err(None, _("data length off by %d bytes") % d[0], name)
88 if d[1]:
88 if d[1]:
89 self.err(None, _("index contains %d extra bytes") % d[1], name)
89 self.err(None, _("index contains %d extra bytes") % d[1], name)
90
90
91 if obj.version != revlog.REVLOGV0:
91 if obj.version != revlog.REVLOGV0:
92 if not self.revlogv1:
92 if not self.revlogv1:
93 self.warn(_("warning: `%s' uses revlog format 1") % name)
93 self.warn(_("warning: `%s' uses revlog format 1") % name)
94 elif self.revlogv1:
94 elif self.revlogv1:
95 self.warn(_("warning: `%s' uses revlog format 0") % name)
95 self.warn(_("warning: `%s' uses revlog format 0") % name)
96
96
97 def checkentry(self, obj, i, node, seen, linkrevs, f):
97 def checkentry(self, obj, i, node, seen, linkrevs, f):
98 lr = obj.linkrev(obj.rev(node))
98 lr = obj.linkrev(obj.rev(node))
99 if lr < 0 or (self.havecl and lr not in linkrevs):
99 if lr < 0 or (self.havecl and lr not in linkrevs):
100 if lr < 0 or lr >= len(self.repo.changelog):
100 if lr < 0 or lr >= len(self.repo.changelog):
101 msg = _("rev %d points to nonexistent changeset %d")
101 msg = _("rev %d points to nonexistent changeset %d")
102 else:
102 else:
103 msg = _("rev %d points to unexpected changeset %d")
103 msg = _("rev %d points to unexpected changeset %d")
104 self.err(None, msg % (i, lr), f)
104 self.err(None, msg % (i, lr), f)
105 if linkrevs:
105 if linkrevs:
106 if f and len(linkrevs) > 1:
106 if f and len(linkrevs) > 1:
107 try:
107 try:
108 # attempt to filter down to real linkrevs
108 # attempt to filter down to real linkrevs
109 linkrevs = [l for l in linkrevs
109 linkrevs = [l for l in linkrevs
110 if self.lrugetctx(l)[f].filenode() == node]
110 if self.lrugetctx(l)[f].filenode() == node]
111 except Exception:
111 except Exception:
112 pass
112 pass
113 self.warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
113 self.warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
114 lr = None # can't be trusted
114 lr = None # can't be trusted
115
115
116 try:
116 try:
117 p1, p2 = obj.parents(node)
117 p1, p2 = obj.parents(node)
118 if p1 not in seen and p1 != nullid:
118 if p1 not in seen and p1 != nullid:
119 self.err(lr, _("unknown parent 1 %s of %s") %
119 self.err(lr, _("unknown parent 1 %s of %s") %
120 (short(p1), short(node)), f)
120 (short(p1), short(node)), f)
121 if p2 not in seen and p2 != nullid:
121 if p2 not in seen and p2 != nullid:
122 self.err(lr, _("unknown parent 2 %s of %s") %
122 self.err(lr, _("unknown parent 2 %s of %s") %
123 (short(p2), short(node)), f)
123 (short(p2), short(node)), f)
124 except Exception as inst:
124 except Exception as inst:
125 self.exc(lr, _("checking parents of %s") % short(node), inst, f)
125 self.exc(lr, _("checking parents of %s") % short(node), inst, f)
126
126
127 if node in seen:
127 if node in seen:
128 self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
128 self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f)
129 seen[node] = i
129 seen[node] = i
130 return lr
130 return lr
131
131
132 def verify(self):
132 def verify(self):
133 repo = self.repo
133 repo = self.repo
134
134
135 ui = repo.ui
135 ui = repo.ui
136
136
137 if not repo.url().startswith('file:'):
137 if not repo.url().startswith('file:'):
138 raise error.Abort(_("cannot verify bundle or remote repos"))
138 raise error.Abort(_("cannot verify bundle or remote repos"))
139
139
140 if os.path.exists(repo.sjoin("journal")):
140 if os.path.exists(repo.sjoin("journal")):
141 ui.warn(_("abandoned transaction found - run hg recover\n"))
141 ui.warn(_("abandoned transaction found - run hg recover\n"))
142
142
143 if ui.verbose or not self.revlogv1:
143 if ui.verbose or not self.revlogv1:
144 ui.status(_("repository uses revlog format %d\n") %
144 ui.status(_("repository uses revlog format %d\n") %
145 (self.revlogv1 and 1 or 0))
145 (self.revlogv1 and 1 or 0))
146
146
147 mflinkrevs, filelinkrevs = self._verifychangelog()
147 mflinkrevs, filelinkrevs = self._verifychangelog()
148
148
149 filenodes = self._verifymanifest(mflinkrevs)
149 filenodes = self._verifymanifest(mflinkrevs)
150
150
151 self._crosscheckfiles(mflinkrevs, filelinkrevs, filenodes)
151 self._crosscheckfiles(mflinkrevs, filelinkrevs, filenodes)
152 del mflinkrevs
152 del mflinkrevs
153
153
154 totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
154 totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs)
155
155
156 ui.status(_("%d files, %d changesets, %d total revisions\n") %
156 ui.status(_("%d files, %d changesets, %d total revisions\n") %
157 (totalfiles, len(repo.changelog), filerevisions))
157 (totalfiles, len(repo.changelog), filerevisions))
158 if self.warnings:
158 if self.warnings:
159 ui.warn(_("%d warnings encountered!\n") % self.warnings)
159 ui.warn(_("%d warnings encountered!\n") % self.warnings)
160 if self.fncachewarned:
160 if self.fncachewarned:
161 ui.warn(_('hint: run "hg debugrebuildfncache" to recover from '
161 ui.warn(_('hint: run "hg debugrebuildfncache" to recover from '
162 'corrupt fncache\n'))
162 'corrupt fncache\n'))
163 if self.errors:
163 if self.errors:
164 ui.warn(_("%d integrity errors encountered!\n") % self.errors)
164 ui.warn(_("%d integrity errors encountered!\n") % self.errors)
165 if self.badrevs:
165 if self.badrevs:
166 ui.warn(_("(first damaged changeset appears to be %d)\n")
166 ui.warn(_("(first damaged changeset appears to be %d)\n")
167 % min(self.badrevs))
167 % min(self.badrevs))
168 return 1
168 return 1
169
169
170 def _verifychangelog(self):
170 def _verifychangelog(self):
171 ui = self.ui
171 ui = self.ui
172 repo = self.repo
172 repo = self.repo
173 cl = repo.changelog
173 cl = repo.changelog
174
174
175 ui.status(_("checking changesets\n"))
175 ui.status(_("checking changesets\n"))
176 mflinkrevs = {}
176 mflinkrevs = {}
177 filelinkrevs = {}
177 filelinkrevs = {}
178 seen = {}
178 seen = {}
179 self.checklog(cl, "changelog", 0)
179 self.checklog(cl, "changelog", 0)
180 total = len(repo)
180 total = len(repo)
181 for i in repo:
181 for i in repo:
182 ui.progress(_('checking'), i, total=total, unit=_('changesets'))
182 ui.progress(_('checking'), i, total=total, unit=_('changesets'))
183 n = cl.node(i)
183 n = cl.node(i)
184 self.checkentry(cl, i, n, seen, [i], "changelog")
184 self.checkentry(cl, i, n, seen, [i], "changelog")
185
185
186 try:
186 try:
187 changes = cl.read(n)
187 changes = cl.read(n)
188 if changes[0] != nullid:
188 if changes[0] != nullid:
189 mflinkrevs.setdefault(changes[0], []).append(i)
189 mflinkrevs.setdefault(changes[0], []).append(i)
190 self.refersmf = True
190 self.refersmf = True
191 for f in changes[3]:
191 for f in changes[3]:
192 if _validpath(repo, f):
192 if _validpath(repo, f):
193 filelinkrevs.setdefault(_normpath(f), []).append(i)
193 filelinkrevs.setdefault(_normpath(f), []).append(i)
194 except Exception as inst:
194 except Exception as inst:
195 self.refersmf = True
195 self.refersmf = True
196 self.exc(i, _("unpacking changeset %s") % short(n), inst)
196 self.exc(i, _("unpacking changeset %s") % short(n), inst)
197 ui.progress(_('checking'), None)
197 ui.progress(_('checking'), None)
198 return mflinkrevs, filelinkrevs
198 return mflinkrevs, filelinkrevs
199
199
200 def _verifymanifest(self, mflinkrevs):
200 def _verifymanifest(self, mflinkrevs):
201 repo = self.repo
201 repo = self.repo
202 ui = self.ui
202 ui = self.ui
203 mf = self.repo.manifest
203 mf = self.repo.manifest
204
204
205 ui.status(_("checking manifests\n"))
205 ui.status(_("checking manifests\n"))
206 filenodes = {}
206 filenodes = {}
207 seen = {}
207 seen = {}
208 if self.refersmf:
208 if self.refersmf:
209 # Do not check manifest if there are only changelog entries with
209 # Do not check manifest if there are only changelog entries with
210 # null manifests.
210 # null manifests.
211 self.checklog(mf, "manifest", 0)
211 self.checklog(mf, "manifest", 0)
212 total = len(mf)
212 total = len(mf)
213 for i in mf:
213 for i in mf:
214 ui.progress(_('checking'), i, total=total, unit=_('manifests'))
214 ui.progress(_('checking'), i, total=total, unit=_('manifests'))
215 n = mf.node(i)
215 n = mf.node(i)
216 lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []),
216 lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []),
217 "manifest")
217 "manifest")
218 if n in mflinkrevs:
218 if n in mflinkrevs:
219 del mflinkrevs[n]
219 del mflinkrevs[n]
220 else:
220 else:
221 self.err(lr, _("%s not in changesets") % short(n), "manifest")
221 self.err(lr, _("%s not in changesets") % short(n), "manifest")
222
222
223 try:
223 try:
224 for f, fn in mf.readdelta(n).iteritems():
224 for f, fn in mf.readdelta(n).iteritems():
225 if not f:
225 if not f:
226 self.err(lr, _("file without name in manifest"))
226 self.err(lr, _("file without name in manifest"))
227 elif f != "/dev/null": # ignore this in very old repos
227 elif f != "/dev/null": # ignore this in very old repos
228 if _validpath(repo, f):
228 if _validpath(repo, f):
229 filenodes.setdefault(
229 filenodes.setdefault(
230 _normpath(f), {}).setdefault(fn, lr)
230 _normpath(f), {}).setdefault(fn, lr)
231 except Exception as inst:
231 except Exception as inst:
232 self.exc(lr, _("reading manifest delta %s") % short(n), inst)
232 self.exc(lr, _("reading manifest delta %s") % short(n), inst)
233 ui.progress(_('checking'), None)
233 ui.progress(_('checking'), None)
234
234
235 return filenodes
235 return filenodes
236
236
237 def _crosscheckfiles(self, mflinkrevs, filelinkrevs, filenodes):
237 def _crosscheckfiles(self, mflinkrevs, filelinkrevs, filenodes):
238 repo = self.repo
238 repo = self.repo
239 ui = self.ui
239 ui = self.ui
240 ui.status(_("crosschecking files in changesets and manifests\n"))
240 ui.status(_("crosschecking files in changesets and manifests\n"))
241
241
242 total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
242 total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes)
243 count = 0
243 count = 0
244 if self.havemf:
244 if self.havemf:
245 for c, m in sorted([(c, m) for m in mflinkrevs
245 for c, m in sorted([(c, m) for m in mflinkrevs
246 for c in mflinkrevs[m]]):
246 for c in mflinkrevs[m]]):
247 count += 1
247 count += 1
248 if m == nullid:
248 if m == nullid:
249 continue
249 continue
250 ui.progress(_('crosschecking'), count, total=total)
250 ui.progress(_('crosschecking'), count, total=total)
251 self.err(c, _("changeset refers to unknown manifest %s") %
251 self.err(c, _("changeset refers to unknown manifest %s") %
252 short(m))
252 short(m))
253
253
254 for f in sorted(filelinkrevs):
254 for f in sorted(filelinkrevs):
255 count += 1
255 count += 1
256 ui.progress(_('crosschecking'), count, total=total)
256 ui.progress(_('crosschecking'), count, total=total)
257 if f not in filenodes:
257 if f not in filenodes:
258 lr = filelinkrevs[f][0]
258 lr = filelinkrevs[f][0]
259 self.err(lr, _("in changeset but not in manifest"), f)
259 self.err(lr, _("in changeset but not in manifest"), f)
260
260
261 if self.havecl:
261 if self.havecl:
262 for f in sorted(filenodes):
262 for f in sorted(filenodes):
263 count += 1
263 count += 1
264 ui.progress(_('crosschecking'), count, total=total)
264 ui.progress(_('crosschecking'), count, total=total)
265 if f not in filelinkrevs:
265 if f not in filelinkrevs:
266 try:
266 try:
267 fl = repo.file(f)
267 fl = repo.file(f)
268 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
268 lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
269 except Exception:
269 except Exception:
270 lr = None
270 lr = None
271 self.err(lr, _("in manifest but not in changeset"), f)
271 self.err(lr, _("in manifest but not in changeset"), f)
272
272
273 ui.progress(_('crosschecking'), None)
273 ui.progress(_('crosschecking'), None)
274
274
275 def _verifyfiles(self, filenodes, filelinkrevs):
275 def _verifyfiles(self, filenodes, filelinkrevs):
276 repo = self.repo
276 repo = self.repo
277 ui = self.ui
277 ui = self.ui
278 lrugetctx = self.lrugetctx
278 lrugetctx = self.lrugetctx
279 revlogv1 = self.revlogv1
279 revlogv1 = self.revlogv1
280 havemf = self.havemf
280 havemf = self.havemf
281 ui.status(_("checking files\n"))
281 ui.status(_("checking files\n"))
282
282
283 storefiles = set()
283 storefiles = set()
284 for f, f2, size in repo.store.datafiles():
284 for f, f2, size in repo.store.datafiles():
285 if not f:
285 if not f:
286 self.err(None, _("cannot decode filename '%s'") % f2)
286 self.err(None, _("cannot decode filename '%s'") % f2)
287 elif size > 0 or not revlogv1:
287 elif (size > 0 or not revlogv1) and f.startswith('data/'):
288 storefiles.add(_normpath(f))
288 storefiles.add(_normpath(f))
289
289
290 files = sorted(set(filenodes) | set(filelinkrevs))
290 files = sorted(set(filenodes) | set(filelinkrevs))
291 total = len(files)
291 total = len(files)
292 revisions = 0
292 revisions = 0
293 for i, f in enumerate(files):
293 for i, f in enumerate(files):
294 ui.progress(_('checking'), i, item=f, total=total)
294 ui.progress(_('checking'), i, item=f, total=total)
295 try:
295 try:
296 linkrevs = filelinkrevs[f]
296 linkrevs = filelinkrevs[f]
297 except KeyError:
297 except KeyError:
298 # in manifest but not in changelog
298 # in manifest but not in changelog
299 linkrevs = []
299 linkrevs = []
300
300
301 if linkrevs:
301 if linkrevs:
302 lr = linkrevs[0]
302 lr = linkrevs[0]
303 else:
303 else:
304 lr = None
304 lr = None
305
305
306 try:
306 try:
307 fl = repo.file(f)
307 fl = repo.file(f)
308 except error.RevlogError as e:
308 except error.RevlogError as e:
309 self.err(lr, _("broken revlog! (%s)") % e, f)
309 self.err(lr, _("broken revlog! (%s)") % e, f)
310 continue
310 continue
311
311
312 for ff in fl.files():
312 for ff in fl.files():
313 try:
313 try:
314 storefiles.remove(ff)
314 storefiles.remove(ff)
315 except KeyError:
315 except KeyError:
316 self.warn(_(" warning: revlog '%s' not in fncache!") % ff)
316 self.warn(_(" warning: revlog '%s' not in fncache!") % ff)
317 self.fncachewarned = True
317 self.fncachewarned = True
318
318
319 self.checklog(fl, f, lr)
319 self.checklog(fl, f, lr)
320 seen = {}
320 seen = {}
321 rp = None
321 rp = None
322 for i in fl:
322 for i in fl:
323 revisions += 1
323 revisions += 1
324 n = fl.node(i)
324 n = fl.node(i)
325 lr = self.checkentry(fl, i, n, seen, linkrevs, f)
325 lr = self.checkentry(fl, i, n, seen, linkrevs, f)
326 if f in filenodes:
326 if f in filenodes:
327 if havemf and n not in filenodes[f]:
327 if havemf and n not in filenodes[f]:
328 self.err(lr, _("%s not in manifests") % (short(n)), f)
328 self.err(lr, _("%s not in manifests") % (short(n)), f)
329 else:
329 else:
330 del filenodes[f][n]
330 del filenodes[f][n]
331
331
332 # verify contents
332 # verify contents
333 try:
333 try:
334 l = len(fl.read(n))
334 l = len(fl.read(n))
335 rp = fl.renamed(n)
335 rp = fl.renamed(n)
336 if l != fl.size(i):
336 if l != fl.size(i):
337 if len(fl.revision(n)) != fl.size(i):
337 if len(fl.revision(n)) != fl.size(i):
338 self.err(lr, _("unpacked size is %s, %s expected") %
338 self.err(lr, _("unpacked size is %s, %s expected") %
339 (l, fl.size(i)), f)
339 (l, fl.size(i)), f)
340 except error.CensoredNodeError:
340 except error.CensoredNodeError:
341 # experimental config: censor.policy
341 # experimental config: censor.policy
342 if ui.config("censor", "policy", "abort") == "abort":
342 if ui.config("censor", "policy", "abort") == "abort":
343 self.err(lr, _("censored file data"), f)
343 self.err(lr, _("censored file data"), f)
344 except Exception as inst:
344 except Exception as inst:
345 self.exc(lr, _("unpacking %s") % short(n), inst, f)
345 self.exc(lr, _("unpacking %s") % short(n), inst, f)
346
346
347 # check renames
347 # check renames
348 try:
348 try:
349 if rp:
349 if rp:
350 if lr is not None and ui.verbose:
350 if lr is not None and ui.verbose:
351 ctx = lrugetctx(lr)
351 ctx = lrugetctx(lr)
352 found = False
352 found = False
353 for pctx in ctx.parents():
353 for pctx in ctx.parents():
354 if rp[0] in pctx:
354 if rp[0] in pctx:
355 found = True
355 found = True
356 break
356 break
357 if not found:
357 if not found:
358 self.warn(_("warning: copy source of '%s' not"
358 self.warn(_("warning: copy source of '%s' not"
359 " in parents of %s") % (f, ctx))
359 " in parents of %s") % (f, ctx))
360 fl2 = repo.file(rp[0])
360 fl2 = repo.file(rp[0])
361 if not len(fl2):
361 if not len(fl2):
362 self.err(lr, _("empty or missing copy source "
362 self.err(lr, _("empty or missing copy source "
363 "revlog %s:%s") % (rp[0], short(rp[1])), f)
363 "revlog %s:%s") % (rp[0], short(rp[1])), f)
364 elif rp[1] == nullid:
364 elif rp[1] == nullid:
365 ui.note(_("warning: %s@%s: copy source"
365 ui.note(_("warning: %s@%s: copy source"
366 " revision is nullid %s:%s\n")
366 " revision is nullid %s:%s\n")
367 % (f, lr, rp[0], short(rp[1])))
367 % (f, lr, rp[0], short(rp[1])))
368 else:
368 else:
369 fl2.rev(rp[1])
369 fl2.rev(rp[1])
370 except Exception as inst:
370 except Exception as inst:
371 self.exc(lr, _("checking rename of %s") % short(n), inst, f)
371 self.exc(lr, _("checking rename of %s") % short(n), inst, f)
372
372
373 # cross-check
373 # cross-check
374 if f in filenodes:
374 if f in filenodes:
375 fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
375 fns = [(lr, n) for n, lr in filenodes[f].iteritems()]
376 for lr, node in sorted(fns):
376 for lr, node in sorted(fns):
377 self.err(lr, _("%s in manifests not found") % short(node),
377 self.err(lr, _("%s in manifests not found") % short(node),
378 f)
378 f)
379 ui.progress(_('checking'), None)
379 ui.progress(_('checking'), None)
380
380
381 for f in storefiles:
381 for f in storefiles:
382 self.warn(_("warning: orphan revlog '%s'") % f)
382 self.warn(_("warning: orphan revlog '%s'") % f)
383
383
384 return len(files), revisions
384 return len(files), revisions
@@ -1,2384 +1,2388 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import print_function
46 from __future__ import print_function
47
47
48 from distutils import version
48 from distutils import version
49 import difflib
49 import difflib
50 import errno
50 import errno
51 import optparse
51 import optparse
52 import os
52 import os
53 import shutil
53 import shutil
54 import subprocess
54 import subprocess
55 import signal
55 import signal
56 import socket
56 import socket
57 import sys
57 import sys
58 import tempfile
58 import tempfile
59 import time
59 import time
60 import random
60 import random
61 import re
61 import re
62 import threading
62 import threading
63 import killdaemons as killmod
63 import killdaemons as killmod
64 try:
64 try:
65 import Queue as queue
65 import Queue as queue
66 except ImportError:
66 except ImportError:
67 import queue
67 import queue
68 from xml.dom import minidom
68 from xml.dom import minidom
69 import unittest
69 import unittest
70
70
71 osenvironb = getattr(os, 'environb', os.environ)
71 osenvironb = getattr(os, 'environb', os.environ)
72
72
73 try:
73 try:
74 import json
74 import json
75 except ImportError:
75 except ImportError:
76 try:
76 try:
77 import simplejson as json
77 import simplejson as json
78 except ImportError:
78 except ImportError:
79 json = None
79 json = None
80
80
81 processlock = threading.Lock()
81 processlock = threading.Lock()
82
82
83 if sys.version_info > (3, 5, 0):
83 if sys.version_info > (3, 5, 0):
84 PYTHON3 = True
84 PYTHON3 = True
85 xrange = range # we use xrange in one place, and we'd rather not use range
85 xrange = range # we use xrange in one place, and we'd rather not use range
86 def _bytespath(p):
86 def _bytespath(p):
87 return p.encode('utf-8')
87 return p.encode('utf-8')
88
88
89 def _strpath(p):
89 def _strpath(p):
90 return p.decode('utf-8')
90 return p.decode('utf-8')
91
91
92 elif sys.version_info >= (3, 0, 0):
92 elif sys.version_info >= (3, 0, 0):
93 print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
93 print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
94 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
94 (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
95 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
95 sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
96 else:
96 else:
97 PYTHON3 = False
97 PYTHON3 = False
98
98
99 # In python 2.x, path operations are generally done using
99 # In python 2.x, path operations are generally done using
100 # bytestrings by default, so we don't have to do any extra
100 # bytestrings by default, so we don't have to do any extra
101 # fiddling there. We define the wrapper functions anyway just to
101 # fiddling there. We define the wrapper functions anyway just to
102 # help keep code consistent between platforms.
102 # help keep code consistent between platforms.
103 def _bytespath(p):
103 def _bytespath(p):
104 return p
104 return p
105
105
106 _strpath = _bytespath
106 _strpath = _bytespath
107
107
108 # For Windows support
108 # For Windows support
109 wifexited = getattr(os, "WIFEXITED", lambda x: False)
109 wifexited = getattr(os, "WIFEXITED", lambda x: False)
110
110
111 def checkportisavailable(port):
111 def checkportisavailable(port):
112 """return true if a port seems free to bind on localhost"""
112 """return true if a port seems free to bind on localhost"""
113 try:
113 try:
114 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
114 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
115 s.bind(('localhost', port))
115 s.bind(('localhost', port))
116 s.close()
116 s.close()
117 return True
117 return True
118 except socket.error as exc:
118 except socket.error as exc:
119 if not exc.errno == errno.EADDRINUSE:
119 if not exc.errno == errno.EADDRINUSE:
120 raise
120 raise
121 return False
121 return False
122
122
123 closefds = os.name == 'posix'
123 closefds = os.name == 'posix'
124 def Popen4(cmd, wd, timeout, env=None):
124 def Popen4(cmd, wd, timeout, env=None):
125 processlock.acquire()
125 processlock.acquire()
126 p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
126 p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
127 close_fds=closefds,
127 close_fds=closefds,
128 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
128 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
129 stderr=subprocess.STDOUT)
129 stderr=subprocess.STDOUT)
130 processlock.release()
130 processlock.release()
131
131
132 p.fromchild = p.stdout
132 p.fromchild = p.stdout
133 p.tochild = p.stdin
133 p.tochild = p.stdin
134 p.childerr = p.stderr
134 p.childerr = p.stderr
135
135
136 p.timeout = False
136 p.timeout = False
137 if timeout:
137 if timeout:
138 def t():
138 def t():
139 start = time.time()
139 start = time.time()
140 while time.time() - start < timeout and p.returncode is None:
140 while time.time() - start < timeout and p.returncode is None:
141 time.sleep(.1)
141 time.sleep(.1)
142 p.timeout = True
142 p.timeout = True
143 if p.returncode is None:
143 if p.returncode is None:
144 terminate(p)
144 terminate(p)
145 threading.Thread(target=t).start()
145 threading.Thread(target=t).start()
146
146
147 return p
147 return p
148
148
149 PYTHON = _bytespath(sys.executable.replace('\\', '/'))
149 PYTHON = _bytespath(sys.executable.replace('\\', '/'))
150 IMPL_PATH = b'PYTHONPATH'
150 IMPL_PATH = b'PYTHONPATH'
151 if 'java' in sys.platform:
151 if 'java' in sys.platform:
152 IMPL_PATH = b'JYTHONPATH'
152 IMPL_PATH = b'JYTHONPATH'
153
153
154 defaults = {
154 defaults = {
155 'jobs': ('HGTEST_JOBS', 1),
155 'jobs': ('HGTEST_JOBS', 1),
156 'timeout': ('HGTEST_TIMEOUT', 180),
156 'timeout': ('HGTEST_TIMEOUT', 180),
157 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
157 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
158 'port': ('HGTEST_PORT', 20059),
158 'port': ('HGTEST_PORT', 20059),
159 'shell': ('HGTEST_SHELL', 'sh'),
159 'shell': ('HGTEST_SHELL', 'sh'),
160 }
160 }
161
161
162 def parselistfiles(files, listtype, warn=True):
162 def parselistfiles(files, listtype, warn=True):
163 entries = dict()
163 entries = dict()
164 for filename in files:
164 for filename in files:
165 try:
165 try:
166 path = os.path.expanduser(os.path.expandvars(filename))
166 path = os.path.expanduser(os.path.expandvars(filename))
167 f = open(path, "rb")
167 f = open(path, "rb")
168 except IOError as err:
168 except IOError as err:
169 if err.errno != errno.ENOENT:
169 if err.errno != errno.ENOENT:
170 raise
170 raise
171 if warn:
171 if warn:
172 print("warning: no such %s file: %s" % (listtype, filename))
172 print("warning: no such %s file: %s" % (listtype, filename))
173 continue
173 continue
174
174
175 for line in f.readlines():
175 for line in f.readlines():
176 line = line.split(b'#', 1)[0].strip()
176 line = line.split(b'#', 1)[0].strip()
177 if line:
177 if line:
178 entries[line] = filename
178 entries[line] = filename
179
179
180 f.close()
180 f.close()
181 return entries
181 return entries
182
182
183 def getparser():
183 def getparser():
184 """Obtain the OptionParser used by the CLI."""
184 """Obtain the OptionParser used by the CLI."""
185 parser = optparse.OptionParser("%prog [options] [tests]")
185 parser = optparse.OptionParser("%prog [options] [tests]")
186
186
187 # keep these sorted
187 # keep these sorted
188 parser.add_option("--blacklist", action="append",
188 parser.add_option("--blacklist", action="append",
189 help="skip tests listed in the specified blacklist file")
189 help="skip tests listed in the specified blacklist file")
190 parser.add_option("--whitelist", action="append",
190 parser.add_option("--whitelist", action="append",
191 help="always run tests listed in the specified whitelist file")
191 help="always run tests listed in the specified whitelist file")
192 parser.add_option("--changed", type="string",
192 parser.add_option("--changed", type="string",
193 help="run tests that are changed in parent rev or working directory")
193 help="run tests that are changed in parent rev or working directory")
194 parser.add_option("-C", "--annotate", action="store_true",
194 parser.add_option("-C", "--annotate", action="store_true",
195 help="output files annotated with coverage")
195 help="output files annotated with coverage")
196 parser.add_option("-c", "--cover", action="store_true",
196 parser.add_option("-c", "--cover", action="store_true",
197 help="print a test coverage report")
197 help="print a test coverage report")
198 parser.add_option("-d", "--debug", action="store_true",
198 parser.add_option("-d", "--debug", action="store_true",
199 help="debug mode: write output of test scripts to console"
199 help="debug mode: write output of test scripts to console"
200 " rather than capturing and diffing it (disables timeout)")
200 " rather than capturing and diffing it (disables timeout)")
201 parser.add_option("-f", "--first", action="store_true",
201 parser.add_option("-f", "--first", action="store_true",
202 help="exit on the first test failure")
202 help="exit on the first test failure")
203 parser.add_option("-H", "--htmlcov", action="store_true",
203 parser.add_option("-H", "--htmlcov", action="store_true",
204 help="create an HTML report of the coverage of the files")
204 help="create an HTML report of the coverage of the files")
205 parser.add_option("-i", "--interactive", action="store_true",
205 parser.add_option("-i", "--interactive", action="store_true",
206 help="prompt to accept changed output")
206 help="prompt to accept changed output")
207 parser.add_option("-j", "--jobs", type="int",
207 parser.add_option("-j", "--jobs", type="int",
208 help="number of jobs to run in parallel"
208 help="number of jobs to run in parallel"
209 " (default: $%s or %d)" % defaults['jobs'])
209 " (default: $%s or %d)" % defaults['jobs'])
210 parser.add_option("--keep-tmpdir", action="store_true",
210 parser.add_option("--keep-tmpdir", action="store_true",
211 help="keep temporary directory after running tests")
211 help="keep temporary directory after running tests")
212 parser.add_option("-k", "--keywords",
212 parser.add_option("-k", "--keywords",
213 help="run tests matching keywords")
213 help="run tests matching keywords")
214 parser.add_option("-l", "--local", action="store_true",
214 parser.add_option("-l", "--local", action="store_true",
215 help="shortcut for --with-hg=<testdir>/../hg")
215 help="shortcut for --with-hg=<testdir>/../hg")
216 parser.add_option("--loop", action="store_true",
216 parser.add_option("--loop", action="store_true",
217 help="loop tests repeatedly")
217 help="loop tests repeatedly")
218 parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
218 parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
219 help="run each test N times (default=1)", default=1)
219 help="run each test N times (default=1)", default=1)
220 parser.add_option("-n", "--nodiff", action="store_true",
220 parser.add_option("-n", "--nodiff", action="store_true",
221 help="skip showing test changes")
221 help="skip showing test changes")
222 parser.add_option("-p", "--port", type="int",
222 parser.add_option("-p", "--port", type="int",
223 help="port on which servers should listen"
223 help="port on which servers should listen"
224 " (default: $%s or %d)" % defaults['port'])
224 " (default: $%s or %d)" % defaults['port'])
225 parser.add_option("--compiler", type="string",
225 parser.add_option("--compiler", type="string",
226 help="compiler to build with")
226 help="compiler to build with")
227 parser.add_option("--pure", action="store_true",
227 parser.add_option("--pure", action="store_true",
228 help="use pure Python code instead of C extensions")
228 help="use pure Python code instead of C extensions")
229 parser.add_option("-R", "--restart", action="store_true",
229 parser.add_option("-R", "--restart", action="store_true",
230 help="restart at last error")
230 help="restart at last error")
231 parser.add_option("-r", "--retest", action="store_true",
231 parser.add_option("-r", "--retest", action="store_true",
232 help="retest failed tests")
232 help="retest failed tests")
233 parser.add_option("-S", "--noskips", action="store_true",
233 parser.add_option("-S", "--noskips", action="store_true",
234 help="don't report skip tests verbosely")
234 help="don't report skip tests verbosely")
235 parser.add_option("--shell", type="string",
235 parser.add_option("--shell", type="string",
236 help="shell to use (default: $%s or %s)" % defaults['shell'])
236 help="shell to use (default: $%s or %s)" % defaults['shell'])
237 parser.add_option("-t", "--timeout", type="int",
237 parser.add_option("-t", "--timeout", type="int",
238 help="kill errant tests after TIMEOUT seconds"
238 help="kill errant tests after TIMEOUT seconds"
239 " (default: $%s or %d)" % defaults['timeout'])
239 " (default: $%s or %d)" % defaults['timeout'])
240 parser.add_option("--slowtimeout", type="int",
240 parser.add_option("--slowtimeout", type="int",
241 help="kill errant slow tests after SLOWTIMEOUT seconds"
241 help="kill errant slow tests after SLOWTIMEOUT seconds"
242 " (default: $%s or %d)" % defaults['slowtimeout'])
242 " (default: $%s or %d)" % defaults['slowtimeout'])
243 parser.add_option("--time", action="store_true",
243 parser.add_option("--time", action="store_true",
244 help="time how long each test takes")
244 help="time how long each test takes")
245 parser.add_option("--json", action="store_true",
245 parser.add_option("--json", action="store_true",
246 help="store test result data in 'report.json' file")
246 help="store test result data in 'report.json' file")
247 parser.add_option("--tmpdir", type="string",
247 parser.add_option("--tmpdir", type="string",
248 help="run tests in the given temporary directory"
248 help="run tests in the given temporary directory"
249 " (implies --keep-tmpdir)")
249 " (implies --keep-tmpdir)")
250 parser.add_option("-v", "--verbose", action="store_true",
250 parser.add_option("-v", "--verbose", action="store_true",
251 help="output verbose messages")
251 help="output verbose messages")
252 parser.add_option("--xunit", type="string",
252 parser.add_option("--xunit", type="string",
253 help="record xunit results at specified path")
253 help="record xunit results at specified path")
254 parser.add_option("--view", type="string",
254 parser.add_option("--view", type="string",
255 help="external diff viewer")
255 help="external diff viewer")
256 parser.add_option("--with-hg", type="string",
256 parser.add_option("--with-hg", type="string",
257 metavar="HG",
257 metavar="HG",
258 help="test using specified hg script rather than a "
258 help="test using specified hg script rather than a "
259 "temporary installation")
259 "temporary installation")
260 parser.add_option("-3", "--py3k-warnings", action="store_true",
260 parser.add_option("-3", "--py3k-warnings", action="store_true",
261 help="enable Py3k warnings on Python 2.6+")
261 help="enable Py3k warnings on Python 2.6+")
262 parser.add_option('--extra-config-opt', action="append",
262 parser.add_option('--extra-config-opt', action="append",
263 help='set the given config opt in the test hgrc')
263 help='set the given config opt in the test hgrc')
264 parser.add_option('--random', action="store_true",
264 parser.add_option('--random', action="store_true",
265 help='run tests in random order')
265 help='run tests in random order')
266 parser.add_option('--profile-runner', action='store_true',
266 parser.add_option('--profile-runner', action='store_true',
267 help='run statprof on run-tests')
267 help='run statprof on run-tests')
268 parser.add_option('--allow-slow-tests', action='store_true',
268 parser.add_option('--allow-slow-tests', action='store_true',
269 help='allow extremely slow tests')
269 help='allow extremely slow tests')
270 parser.add_option('--showchannels', action='store_true',
270 parser.add_option('--showchannels', action='store_true',
271 help='show scheduling channels')
271 help='show scheduling channels')
272
272
273 for option, (envvar, default) in defaults.items():
273 for option, (envvar, default) in defaults.items():
274 defaults[option] = type(default)(os.environ.get(envvar, default))
274 defaults[option] = type(default)(os.environ.get(envvar, default))
275 parser.set_defaults(**defaults)
275 parser.set_defaults(**defaults)
276
276
277 return parser
277 return parser
278
278
279 def parseargs(args, parser):
279 def parseargs(args, parser):
280 """Parse arguments with our OptionParser and validate results."""
280 """Parse arguments with our OptionParser and validate results."""
281 (options, args) = parser.parse_args(args)
281 (options, args) = parser.parse_args(args)
282
282
283 # jython is always pure
283 # jython is always pure
284 if 'java' in sys.platform or '__pypy__' in sys.modules:
284 if 'java' in sys.platform or '__pypy__' in sys.modules:
285 options.pure = True
285 options.pure = True
286
286
287 if options.with_hg:
287 if options.with_hg:
288 options.with_hg = os.path.expanduser(options.with_hg)
288 options.with_hg = os.path.expanduser(options.with_hg)
289 if not (os.path.isfile(options.with_hg) and
289 if not (os.path.isfile(options.with_hg) and
290 os.access(options.with_hg, os.X_OK)):
290 os.access(options.with_hg, os.X_OK)):
291 parser.error('--with-hg must specify an executable hg script')
291 parser.error('--with-hg must specify an executable hg script')
292 if not os.path.basename(options.with_hg) == 'hg':
292 if not os.path.basename(options.with_hg) == 'hg':
293 sys.stderr.write('warning: --with-hg should specify an hg script\n')
293 sys.stderr.write('warning: --with-hg should specify an hg script\n')
294 if options.local:
294 if options.local:
295 testdir = os.path.dirname(_bytespath(os.path.realpath(sys.argv[0])))
295 testdir = os.path.dirname(_bytespath(os.path.realpath(sys.argv[0])))
296 hgbin = os.path.join(os.path.dirname(testdir), b'hg')
296 hgbin = os.path.join(os.path.dirname(testdir), b'hg')
297 if os.name != 'nt' and not os.access(hgbin, os.X_OK):
297 if os.name != 'nt' and not os.access(hgbin, os.X_OK):
298 parser.error('--local specified, but %r not found or not executable'
298 parser.error('--local specified, but %r not found or not executable'
299 % hgbin)
299 % hgbin)
300 options.with_hg = hgbin
300 options.with_hg = hgbin
301
301
302 options.anycoverage = options.cover or options.annotate or options.htmlcov
302 options.anycoverage = options.cover or options.annotate or options.htmlcov
303 if options.anycoverage:
303 if options.anycoverage:
304 try:
304 try:
305 import coverage
305 import coverage
306 covver = version.StrictVersion(coverage.__version__).version
306 covver = version.StrictVersion(coverage.__version__).version
307 if covver < (3, 3):
307 if covver < (3, 3):
308 parser.error('coverage options require coverage 3.3 or later')
308 parser.error('coverage options require coverage 3.3 or later')
309 except ImportError:
309 except ImportError:
310 parser.error('coverage options now require the coverage package')
310 parser.error('coverage options now require the coverage package')
311
311
312 if options.anycoverage and options.local:
312 if options.anycoverage and options.local:
313 # this needs some path mangling somewhere, I guess
313 # this needs some path mangling somewhere, I guess
314 parser.error("sorry, coverage options do not work when --local "
314 parser.error("sorry, coverage options do not work when --local "
315 "is specified")
315 "is specified")
316
316
317 if options.anycoverage and options.with_hg:
317 if options.anycoverage and options.with_hg:
318 parser.error("sorry, coverage options do not work when --with-hg "
318 parser.error("sorry, coverage options do not work when --with-hg "
319 "is specified")
319 "is specified")
320
320
321 global verbose
321 global verbose
322 if options.verbose:
322 if options.verbose:
323 verbose = ''
323 verbose = ''
324
324
325 if options.tmpdir:
325 if options.tmpdir:
326 options.tmpdir = os.path.expanduser(options.tmpdir)
326 options.tmpdir = os.path.expanduser(options.tmpdir)
327
327
328 if options.jobs < 1:
328 if options.jobs < 1:
329 parser.error('--jobs must be positive')
329 parser.error('--jobs must be positive')
330 if options.interactive and options.debug:
330 if options.interactive and options.debug:
331 parser.error("-i/--interactive and -d/--debug are incompatible")
331 parser.error("-i/--interactive and -d/--debug are incompatible")
332 if options.debug:
332 if options.debug:
333 if options.timeout != defaults['timeout']:
333 if options.timeout != defaults['timeout']:
334 sys.stderr.write(
334 sys.stderr.write(
335 'warning: --timeout option ignored with --debug\n')
335 'warning: --timeout option ignored with --debug\n')
336 if options.slowtimeout != defaults['slowtimeout']:
336 if options.slowtimeout != defaults['slowtimeout']:
337 sys.stderr.write(
337 sys.stderr.write(
338 'warning: --slowtimeout option ignored with --debug\n')
338 'warning: --slowtimeout option ignored with --debug\n')
339 options.timeout = 0
339 options.timeout = 0
340 options.slowtimeout = 0
340 options.slowtimeout = 0
341 if options.py3k_warnings:
341 if options.py3k_warnings:
342 if PYTHON3:
342 if PYTHON3:
343 parser.error(
343 parser.error(
344 '--py3k-warnings can only be used on Python 2.6 and 2.7')
344 '--py3k-warnings can only be used on Python 2.6 and 2.7')
345 if options.blacklist:
345 if options.blacklist:
346 options.blacklist = parselistfiles(options.blacklist, 'blacklist')
346 options.blacklist = parselistfiles(options.blacklist, 'blacklist')
347 if options.whitelist:
347 if options.whitelist:
348 options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
348 options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
349 else:
349 else:
350 options.whitelisted = {}
350 options.whitelisted = {}
351
351
352 if options.showchannels:
352 if options.showchannels:
353 options.nodiff = True
353 options.nodiff = True
354
354
355 return (options, args)
355 return (options, args)
356
356
def rename(src, dst):
    """Move src to dst by copy-then-delete.

    Unlike os.rename(), this succeeds when dst already exists (at the
    cost of atomicity, and of invalidating open handles on src).
    """
    shutil.copy(src, dst)
    os.remove(src)
363
363
# difflib.unified_diff compares sequences of str, but test output here is
# handled as bytes. On Python 3, difflib.diff_bytes adapts a str-based diff
# function to accept bytes, so wrap unified_diff with it there. (On Python 2,
# str already is bytes, so no adapter is needed.)
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools
    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
368
368
def getdiff(expected, output, ref, err):
    """Produce a unified diff (as a list of bytes lines) of test output.

    Backslashes in the file-header lines are normalized to forward
    slashes, and a trailing "space before newline" is trimmed from every
    line. Returns (servefail, lines), where servefail is True when the
    diff reveals that a child server process failed to start.
    """
    servefail = False
    lines = []
    for raw in _unified_diff(expected, output, ref, err):
        cooked = raw
        if cooked.startswith((b'+++', b'---')):
            cooked = cooked.replace(b'\\', b'/')
        if cooked.endswith(b' \n'):
            cooked = cooked[:-2] + b'\n'
        lines.append(cooked)
        if cooked.startswith(b'+ abort: child process failed to start'):
            servefail = True
    return servefail, lines
383
383
# Global verbosity marker. False means quiet; parseargs() sets it to a
# non-False value (possibly the empty string) when --verbose is given.
verbose = False

def vlog(*msg):
    """Forward *msg to log(), but only when verbose mode is enabled."""
    # An identity comparison is required here: verbose may legitimately be
    # set to '' (falsy, yet meaning "verbose on"), so a truthiness test
    # would be wrong.
    return None if verbose is False else log(*msg)
391
391
# Control bytes 0-31 -- except \t (9), \n (10) and \r (13) -- are illegal in
# XML even inside a CDATA section.
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

def cdatasafe(data):
    """Return *data* transformed so it can be embedded in a CDATA block.

    Illegal control bytes are replaced with '?'. Any ']]>' sequence --
    which would prematurely terminate the CDATA section -- gets a space
    inserted between the closing brackets so the block stays intact.
    """
    sanitized = CDATA_EVIL.sub(b'?', data)
    return sanitized.replace(b']]>', b'] ]>')
405
405
def log(*msg):
    """Print *msg* to stdout, space separated, while holding the I/O lock.

    When verbose mode is active its marker value is printed first; output
    is flushed so interleaved runner output stays readable.
    """
    with iolock:
        pieces = list(msg)
        if verbose:
            pieces.insert(0, verbose)
        for piece in pieces:
            print(piece, end=' ')
        print()
        sys.stdout.flush()
418
418
def terminate(proc):
    """Terminate subprocess (with fallback for Python versions < 2.6)"""
    vlog('# Terminating process %d' % proc.pid)
    # Popen.terminate() only exists from Python 2.6 on; otherwise fall
    # back to sending SIGTERM ourselves.
    killer = getattr(proc, 'terminate', None)
    if killer is None:
        def killer():
            os.kill(proc.pid, signal.SIGTERM)
    try:
        killer()
    except OSError:
        # The process may already be gone; that is fine.
        pass
426
426
def killdaemons(pidfile):
    """Kill the daemon processes recorded in *pidfile*.

    Delegates to the killdaemons helper module. remove=True deletes the
    pid file afterwards; tryhard=False presumably limits the kill to a
    single attempt per process -- see the killdaemons module to confirm.
    Progress is reported through vlog.
    """
    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                               logfn=vlog)
430
430
class Test(unittest.TestCase):
    """Encapsulates a single, runnable test.

    While this class conforms to the unittest.TestCase API, it differs in that
    instances need to be instantiated manually. (Typically, unittest.TestCase
    classes are instantiated automatically by scanning modules.)
    """

    # Status code reserved for skipped tests (used by hghave).
    SKIPPED_STATUS = 80

    def __init__(self, path, tmpdir, keeptmpdir=False,
                 debug=False,
                 timeout=defaults['timeout'],
                 startport=defaults['port'], extraconfigopts=None,
                 py3kwarnings=False, shell=None,
                 slowtimeout=defaults['slowtimeout']):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 5 port numbers for execution ($HGPORT through
        $HGPORT4). It is the caller's responsibility to allocate a
        non-overlapping port range to Test instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3kwarnings enables Py3k warnings.

        shell is the shell to execute tests in.
        """
        self.path = path
        self.bname = os.path.basename(path)
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname)

        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3kwarnings = py3kwarnings
        self._shell = _bytespath(shell)

        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None

        # If we're not in --debug mode and reference output file exists,
        # check test output against it.
        if debug:
            self._refout = None # to match "out is None"
        elif os.path.exists(self.refpath):
            f = open(self.refpath, 'rb')
            self._refout = f.read().splitlines(True)
            f.close()
        else:
            self._refout = []

    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        return self.name

    def __str__(self):
        return self.name

    def shortDescription(self):
        return self.name

    def setUp(self):
        """Tasks to perform before run()."""
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            # Another test sharing this thread directory may have created
            # it already; only a real failure should propagate.
            if e.errno != errno.EEXIST:
                raise

        self._testtmp = os.path.join(self._threadtmp,
                                     os.path.basename(self.path))
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except IgnoreTest as e:
                result.addIgnore(self, str(e))
                # As with skips, ignores also should be excluded from
                # the number of tests executed.
                result.testsRun -= 1
            except WarnTest as e:
                result.addWarn(self, str(e))
            except ReportedTest:
                # The failure was already reported elsewhere; nothing
                # further to record here.
                pass
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)

    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None: # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            raise WarnTest('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (ret != 0 or out != self._refout) and not self._skipped \
                and not self._debug:
                f = open(self.errpath, 'wb')
                for line in out:
                    f.write(line)
                f.close()

            # The result object handles diff calculation for us.
            if self._result.addOutputMismatch(self, ret, out, self._refout):
                # change was accepted, skip failing
                return

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))

    def tearDown(self):
        """Tasks to perform after run()."""
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
                (self._testtmp, self._threadtmp))
        else:
            shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if (self._ret != 0 or self._out != self._refout) and not self._skipped \
            and not self._debug and self._out:
            f = open(self.errpath, 'wb')
            for line in self._out:
                f.write(line)
            f.close()

        vlog("# Ret was:", self._ret, '(%s)' % self.name)

    def _run(self, env):
        # This should be implemented in child classes to run tests.
        raise SkipTest('unknown test type')

    def abort(self):
        """Terminate execution of this test."""
        self._aborted = True

    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.
        """
        r = [
            (br':%d\b' % self._startport, b':$HGPORT'),
            (br':%d\b' % (self._startport + 1), b':$HGPORT1'),
            (br':%d\b' % (self._startport + 2), b':$HGPORT2'),
            # Offsets must stay in sync with the HGPORT3/HGPORT4 values
            # exported by _getenv() (startport + 3 and startport + 4).
            # They previously both said "+ 2" by copy-paste mistake, which
            # left ports 3 and 4 unnormalized in test output.
            (br':%d\b' % (self._startport + 3), b':$HGPORT3'),
            (br':%d\b' % (self._startport + 4), b':$HGPORT4'),
            (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
             br'\1 (glob)'),
            ]

        if os.name == 'nt':
            # Build a case-insensitive, slash-agnostic regex matching the
            # test's temporary directory path.
            r.append(
                (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
                    c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
                    for c in self._testtmp), b'$TESTTMP'))
        else:
            r.append((re.escape(self._testtmp), b'$TESTTMP'))

        return r

    def _getenv(self):
        """Obtain environment variables to use during test execution."""
        env = os.environ.copy()
        env['TESTTMP'] = self._testtmp
        env['HOME'] = self._testtmp
        env["HGPORT"] = str(self._startport)
        env["HGPORT1"] = str(self._startport + 1)
        env["HGPORT2"] = str(self._startport + 2)
        env["HGPORT3"] = str(self._startport + 3)
        env["HGPORT4"] = str(self._startport + 4)
        env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
        env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
        env["HGEDITOR"] = ('"' + sys.executable + '"'
                           + ' -c "import sys; sys.exit(0)"')
        env["HGMERGE"] = "internal:merge"
        env["HGUSER"] = "test"
        env["HGENCODING"] = "ascii"
        env["HGENCODINGMODE"] = "strict"

        # Reset some environment variables to well-known values so that
        # the tests produce repeatable output.
        env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
        env['TZ'] = 'GMT'
        env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
        env['COLUMNS'] = '80'
        env['TERM'] = 'xterm'

        for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
                  'NO_PROXY').split():
            if k in env:
                del env[k]

        # unset env related to hooks
        for k in env.keys():
            if k.startswith('HG_'):
                del env[k]

        return env

    def _createhgrc(self, path):
        """Create an hgrc file for this test."""
        hgrc = open(path, 'wb')
        hgrc.write(b'[ui]\n')
        hgrc.write(b'slash = True\n')
        hgrc.write(b'interactive = False\n')
        hgrc.write(b'mergemarkers = detailed\n')
        hgrc.write(b'promptecho = True\n')
        hgrc.write(b'[defaults]\n')
        hgrc.write(b'backout = -d "0 0"\n')
        hgrc.write(b'commit = -d "0 0"\n')
        hgrc.write(b'shelve = --date "0 0"\n')
        hgrc.write(b'tag = -d "0 0"\n')
        hgrc.write(b'[devel]\n')
        hgrc.write(b'all-warnings = true\n')
        hgrc.write(b'[largefiles]\n')
        hgrc.write(b'usercache = %s\n' %
                   (os.path.join(self._testtmp, b'.cache/largefiles')))

        for opt in self._extraconfigopts:
            section, key = opt.split('.', 1)
            assert '=' in key, ('extra config opt %s must '
                                'have an = for assignment' % opt)
            hgrc.write(b'[%s]\n%s\n' % (section, key))
        hgrc.close()

    def fail(self, msg):
        # unittest differentiates between errored and failed.
        # Failed is denoted by AssertionError (by default at least).
        raise AssertionError(msg)

    def _runcommand(self, cmd, env, normalizenewlines=False):
        """Run command in a sub-process, capturing the output (stdout and
        stderr).

        Return a tuple (exitcode, output). output is None in debug mode.
        """
        if self._debug:
            proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
                                    env=env)
            ret = proc.wait()
            return (ret, None)

        proc = Popen4(cmd, self._testtmp, self._timeout, env)
        def cleanup():
            terminate(proc)
            ret = proc.wait()
            if ret == 0:
                ret = signal.SIGTERM << 8
            killdaemons(env['DAEMON_PIDS'])
            return ret

        output = ''
        proc.tochild.close()

        try:
            output = proc.fromchild.read()
        except KeyboardInterrupt:
            vlog('# Handling keyboard interrupt')
            cleanup()
            raise

        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)

        if proc.timeout:
            ret = 'timeout'

        if ret:
            killdaemons(env['DAEMON_PIDS'])

        for s, r in self._getreplacements():
            output = re.sub(s, r, output)

        if normalizenewlines:
            output = output.replace('\r\n', '\n')

        return ret, output.splitlines(True)
852
856
class PythonTest(Test):
    """A test implemented as a Python script, compared against a .out file."""

    @property
    def refpath(self):
        # Expected output lives next to the test, with a ".out" suffix.
        return os.path.join(self._testdir, b'%s.out' % self.bname)

    def _run(self, env):
        switch = b' -3' if self._py3kwarnings else b''
        cmd = b'%s%s "%s"' % (PYTHON, switch, self.path)
        vlog("# Running", cmd)
        # Windows pipes deliver \r\n; normalize so output matches the
        # reference file.
        result = self._runcommand(cmd, env,
                                  normalizenewlines=(os.name == 'nt'))
        if self._aborted:
            raise KeyboardInterrupt()
        return result
871
875
# This script may want to drop globs from lines matching these patterns on
# Windows, but check-code.py wants a glob on these lines unconditionally. Don't
# warn if that is the case for anything matching these lines.
checkcodeglobpats = [
    re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
    re.compile(br'^moving \S+/.*[^)]$'),
    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
]
880
884
# bchr(i) turns a small integer into a one-byte bytes object on both
# Python 2 (where chr() already returns a str/bytes) and Python 3.
if PYTHON3:
    bchr = lambda x: bytes([x])
else:
    bchr = chr
884
888
class TTest(Test):
    """A "t test" is a test backed by a .t file."""

    # Prefixes hghave writes when a test is skipped or a feature probe
    # fails; parsehghaveoutput() scans for them.
    SKIPPED_PREFIX = 'skipped: '
    FAILED_PREFIX = 'hghave check failed: '
    # Matches any byte that must be escaped before being stored in a .t file.
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    # Substitution helper paired with ESCAPEMAP below.
    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
    # Map each byte to its \xNN escape; backslash and \r get readable forms.
    ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})

    @property
    def refpath(self):
        # The .t file itself doubles as the reference output.
        return os.path.join(self._testdir, self.bname)
899
903
900 def _run(self, env):
904 def _run(self, env):
901 f = open(self.path, 'rb')
905 f = open(self.path, 'rb')
902 lines = f.readlines()
906 lines = f.readlines()
903 f.close()
907 f.close()
904
908
905 salt, script, after, expected = self._parsetest(lines)
909 salt, script, after, expected = self._parsetest(lines)
906
910
907 # Write out the generated script.
911 # Write out the generated script.
908 fname = b'%s.sh' % self._testtmp
912 fname = b'%s.sh' % self._testtmp
909 f = open(fname, 'wb')
913 f = open(fname, 'wb')
910 for l in script:
914 for l in script:
911 f.write(l)
915 f.write(l)
912 f.close()
916 f.close()
913
917
914 cmd = b'%s "%s"' % (self._shell, fname)
918 cmd = b'%s "%s"' % (self._shell, fname)
915 vlog("# Running", cmd)
919 vlog("# Running", cmd)
916
920
917 exitcode, output = self._runcommand(cmd, env)
921 exitcode, output = self._runcommand(cmd, env)
918
922
919 if self._aborted:
923 if self._aborted:
920 raise KeyboardInterrupt()
924 raise KeyboardInterrupt()
921
925
922 # Do not merge output if skipped. Return hghave message instead.
926 # Do not merge output if skipped. Return hghave message instead.
923 # Similarly, with --debug, output is None.
927 # Similarly, with --debug, output is None.
924 if exitcode == self.SKIPPED_STATUS or output is None:
928 if exitcode == self.SKIPPED_STATUS or output is None:
925 return exitcode, output
929 return exitcode, output
926
930
927 return self._processoutput(exitcode, output, salt, after, expected)
931 return self._processoutput(exitcode, output, salt, after, expected)
928
932
    def _hghave(self, reqs):
        """Probe a list of hghave feature names.

        Returns (available, stdout): exit status 0 from hghave means every
        feature is present; status 2 means hghave itself blew up (its output
        is printed and the whole runner aborts); anything else means a
        feature is missing and stdout carries the reason.
        """
        # TODO do something smarter when all other uses of hghave are gone.
        runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
        # Forward slashes so the path survives MSYS/Windows shells.
        tdir = runtestdir.replace(b'\\', b'/')
        proc = Popen4(b'%s -c "%s/hghave %s"' %
                      (self._shell, tdir, b' '.join(reqs)),
                      self._testtmp, 0, self._getenv())
        stdout, stderr = proc.communicate()
        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)
        if ret == 2:
            # hghave is broken; abort the test runner entirely.
            print(stdout)
            sys.exit(1)

        if ret != 0:
            return False, stdout

        # Tests marked 'slow' get the more generous timeout.
        if 'slow' in reqs:
            self._timeout = self._slowtimeout
        return True, None
950
954
    def _parsetest(self, lines):
        """Compile the .t file into a shell script.

        Returns (salt, script, after, expected) where script is the list of
        shell lines to execute, after maps source line numbers to the
        non-executable text to re-merge, and expected maps source line
        numbers to the expected output lines.
        """
        # We generate a shell script which outputs unique markers to line
        # up script results with our source. These markers include input
        # line number and the last return code.
        salt = b"SALT%d" % time.time()
        def addsalt(line, inpython):
            if inpython:
                script.append(b'%s %d 0\n' % (salt, line))
            else:
                script.append(b'echo %s %d $?\n' % (salt, line))

        script = []

        # After we run the shell script, we re-unify the script output
        # with non-active parts of the source, with synchronization by our
        # SALT line number markers. The after table contains the non-active
        # components, ordered by line number.
        after = {}

        # Expected shell script output.
        expected = {}

        pos = prepos = -1

        # True or False when in a true or false conditional section
        skipping = None

        # We keep track of whether or not we're in a Python block so we
        # can generate the surrounding doctest magic.
        inpython = False

        if self._debug:
            script.append(b'set -x\n')
        if os.getenv('MSYSTEM'):
            # MSYS: force 'pwd' to emit Windows-style paths.
            script.append(b'alias pwd="pwd -W"\n')

        for n, l in enumerate(lines):
            if not l.endswith(b'\n'):
                l += b'\n'
            if l.startswith(b'#require'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#require':
                    after.setdefault(pos, []).append('  !!! invalid #require\n')
                haveresult, message = self._hghave(lsplit[1:])
                if not haveresult:
                    # Missing requirement: replace the whole script with a
                    # skip (exit 80 is the skipped status).
                    script = [b'echo "%s"\nexit 80\n' % message]
                    break
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#if'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#if':
                    after.setdefault(pos, []).append('  !!! invalid #if\n')
                if skipping is not None:
                    after.setdefault(pos, []).append('  !!! nested #if\n')
                skipping = not self._hghave(lsplit[1:])[0]
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#else'):
                if skipping is None:
                    after.setdefault(pos, []).append('  !!! missing #if\n')
                skipping = not skipping
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#endif'):
                if skipping is None:
                    after.setdefault(pos, []).append('  !!! missing #if\n')
                skipping = None
                after.setdefault(pos, []).append(l)
            elif skipping:
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'  >>> '): # python inlines
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                if not inpython:
                    # We've just entered a Python block. Add the header.
                    inpython = True
                    addsalt(prepos, False) # Make sure we report the exit code.
                    script.append(b'%s -m heredoctest <<EOF\n' % PYTHON)
                addsalt(n, True)
                script.append(l[2:])
            elif l.startswith(b'  ... '): # python inlines
                after.setdefault(prepos, []).append(l)
                script.append(l[2:])
            elif l.startswith(b'  $ '): # commands
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                addsalt(n, False)
                cmd = l[4:].split()
                if len(cmd) == 2 and cmd[0] == b'cd':
                    # Make a failing 'cd' abort rather than run the rest of
                    # the test in the wrong directory.
                    l = b'  $ cd %s || exit 1\n' % cmd[1]
                script.append(l[4:])
            elif l.startswith(b'  > '): # continuations
                after.setdefault(prepos, []).append(l)
                script.append(l[4:])
            elif l.startswith(b'  '): # results
                # Queue up a list of expected results.
                expected.setdefault(pos, []).append(l[2:])
            else:
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                # Non-command/result. Queue up for merged output.
                after.setdefault(pos, []).append(l)

        if inpython:
            script.append(b'EOF\n')
        if skipping is not None:
            after.setdefault(pos, []).append('  !!! missing #endif\n')
        addsalt(n + 1, False)

        return salt, script, after, expected
1065
1069
    def _processoutput(self, exitcode, output, salt, after, expected):
        """Merge the script output back into a unified test.

        Returns (exitcode, postout); exitcode becomes False when every
        mismatch was warn-only.
        """
        warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
        if exitcode != 0:
            warnonly = 3

        pos = -1
        postout = []
        for l in output:
            lout, lcmd = l, None
            if salt in l:
                # Salted marker line: text before the salt is real output,
                # text after is "<source line> <return code>".
                lout, lcmd = l.split(salt, 1)

            while lout:
                if not lout.endswith(b'\n'):
                    lout += b' (no-eol)\n'

                # Find the expected output at the current position.
                el = None
                if expected.get(pos, None):
                    el = expected[pos].pop(0)

                r = TTest.linematch(el, lout)
                if isinstance(r, str):
                    if r == '+glob':
                        lout = el[:-1] + ' (glob)\n'
                        r = '' # Warn only this line.
                    elif r == '-glob':
                        lout = ''.join(el.rsplit(' (glob)', 1))
                        r = '' # Warn only this line.
                    elif r == "retry":
                        # Optional "(?)" line that didn't match: emit it and
                        # retry the same output line against the next one.
                        postout.append(b'  ' + el)
                        continue
                    else:
                        log('\ninfo, unknown linematch result: %r\n' % r)
                        r = False
                if r:
                    postout.append(b'  ' + el)
                else:
                    if self.NEEDESCAPE(lout):
                        lout = TTest._stringescape(b'%s (esc)\n' %
                                                   lout.rstrip(b'\n'))
                    postout.append(b'  ' + lout) # Let diff deal with it.
                    if r != '': # If line failed.
                        warnonly = 3 # for sure not
                    elif warnonly == 1: # Is "not yet" and line is warn only.
                        warnonly = 2 # Yes do warn.
                break

            # clean up any optional leftovers
            while expected.get(pos, None):
                el = expected[pos].pop(0)
                if not el.endswith(b" (?)\n"):
                    # First non-optional leftover stops the sweep; put it back.
                    expected[pos].insert(0, el)
                    break
                postout.append(b'  ' + el)

            if lcmd:
                # Add on last return code.
                ret = int(lcmd.split()[1])
                if ret != 0:
                    postout.append(b'  [%d]\n' % ret)
                if pos in after:
                    # Merge in non-active test bits.
                    postout += after.pop(pos)
                pos = int(lcmd.split()[0])

        if pos in after:
            postout += after.pop(pos)

        if warnonly == 2:
            exitcode = False # Set exitcode to warned.

        return exitcode, postout
1140
1144
1141 @staticmethod
1145 @staticmethod
1142 def rematch(el, l):
1146 def rematch(el, l):
1143 try:
1147 try:
1144 # use \Z to ensure that the regex matches to the end of the string
1148 # use \Z to ensure that the regex matches to the end of the string
1145 if os.name == 'nt':
1149 if os.name == 'nt':
1146 return re.match(el + br'\r?\n\Z', l)
1150 return re.match(el + br'\r?\n\Z', l)
1147 return re.match(el + br'\n\Z', l)
1151 return re.match(el + br'\n\Z', l)
1148 except re.error:
1152 except re.error:
1149 # el is an invalid regex
1153 # el is an invalid regex
1150 return False
1154 return False
1151
1155
1152 @staticmethod
1156 @staticmethod
1153 def globmatch(el, l):
1157 def globmatch(el, l):
1154 # The only supported special characters are * and ? plus / which also
1158 # The only supported special characters are * and ? plus / which also
1155 # matches \ on windows. Escaping of these characters is supported.
1159 # matches \ on windows. Escaping of these characters is supported.
1156 if el + b'\n' == l:
1160 if el + b'\n' == l:
1157 if os.altsep:
1161 if os.altsep:
1158 # matching on "/" is not needed for this line
1162 # matching on "/" is not needed for this line
1159 for pat in checkcodeglobpats:
1163 for pat in checkcodeglobpats:
1160 if pat.match(el):
1164 if pat.match(el):
1161 return True
1165 return True
1162 return b'-glob'
1166 return b'-glob'
1163 return True
1167 return True
1164 i, n = 0, len(el)
1168 i, n = 0, len(el)
1165 res = b''
1169 res = b''
1166 while i < n:
1170 while i < n:
1167 c = el[i:i + 1]
1171 c = el[i:i + 1]
1168 i += 1
1172 i += 1
1169 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1173 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1170 res += el[i - 1:i + 1]
1174 res += el[i - 1:i + 1]
1171 i += 1
1175 i += 1
1172 elif c == b'*':
1176 elif c == b'*':
1173 res += b'.*'
1177 res += b'.*'
1174 elif c == b'?':
1178 elif c == b'?':
1175 res += b'.'
1179 res += b'.'
1176 elif c == b'/' and os.altsep:
1180 elif c == b'/' and os.altsep:
1177 res += b'[/\\\\]'
1181 res += b'[/\\\\]'
1178 else:
1182 else:
1179 res += re.escape(c)
1183 res += re.escape(c)
1180 return TTest.rematch(res, l)
1184 return TTest.rematch(res, l)
1181
1185
1182 @staticmethod
1186 @staticmethod
1183 def linematch(el, l):
1187 def linematch(el, l):
1184 retry = False
1188 retry = False
1185 if el == l: # perfect match (fast)
1189 if el == l: # perfect match (fast)
1186 return True
1190 return True
1187 if el:
1191 if el:
1188 if el.endswith(b" (?)\n"):
1192 if el.endswith(b" (?)\n"):
1189 retry = "retry"
1193 retry = "retry"
1190 el = el[:-5] + "\n"
1194 el = el[:-5] + "\n"
1191 if el.endswith(b" (esc)\n"):
1195 if el.endswith(b" (esc)\n"):
1192 if PYTHON3:
1196 if PYTHON3:
1193 el = el[:-7].decode('unicode_escape') + '\n'
1197 el = el[:-7].decode('unicode_escape') + '\n'
1194 el = el.encode('utf-8')
1198 el = el.encode('utf-8')
1195 else:
1199 else:
1196 el = el[:-7].decode('string-escape') + '\n'
1200 el = el[:-7].decode('string-escape') + '\n'
1197 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1201 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1198 return True
1202 return True
1199 if el.endswith(b" (re)\n"):
1203 if el.endswith(b" (re)\n"):
1200 return TTest.rematch(el[:-6], l) or retry
1204 return TTest.rematch(el[:-6], l) or retry
1201 if el.endswith(b" (glob)\n"):
1205 if el.endswith(b" (glob)\n"):
1202 # ignore '(glob)' added to l by 'replacements'
1206 # ignore '(glob)' added to l by 'replacements'
1203 if l.endswith(b" (glob)\n"):
1207 if l.endswith(b" (glob)\n"):
1204 l = l[:-8] + b"\n"
1208 l = l[:-8] + b"\n"
1205 return TTest.globmatch(el[:-8], l)
1209 return TTest.globmatch(el[:-8], l)
1206 if os.altsep and l.replace(b'\\', b'/') == el:
1210 if os.altsep and l.replace(b'\\', b'/') == el:
1207 return b'+glob'
1211 return b'+glob'
1208 return retry
1212 return retry
1209
1213
1210 @staticmethod
1214 @staticmethod
1211 def parsehghaveoutput(lines):
1215 def parsehghaveoutput(lines):
1212 '''Parse hghave log lines.
1216 '''Parse hghave log lines.
1213
1217
1214 Return tuple of lists (missing, failed):
1218 Return tuple of lists (missing, failed):
1215 * the missing/unknown features
1219 * the missing/unknown features
1216 * the features for which existence check failed'''
1220 * the features for which existence check failed'''
1217 missing = []
1221 missing = []
1218 failed = []
1222 failed = []
1219 for line in lines:
1223 for line in lines:
1220 if line.startswith(TTest.SKIPPED_PREFIX):
1224 if line.startswith(TTest.SKIPPED_PREFIX):
1221 line = line.splitlines()[0]
1225 line = line.splitlines()[0]
1222 missing.append(line[len(TTest.SKIPPED_PREFIX):])
1226 missing.append(line[len(TTest.SKIPPED_PREFIX):])
1223 elif line.startswith(TTest.FAILED_PREFIX):
1227 elif line.startswith(TTest.FAILED_PREFIX):
1224 line = line.splitlines()[0]
1228 line = line.splitlines()[0]
1225 failed.append(line[len(TTest.FAILED_PREFIX):])
1229 failed.append(line[len(TTest.FAILED_PREFIX):])
1226
1230
1227 return missing, failed
1231 return missing, failed
1228
1232
    @staticmethod
    def _escapef(m):
        # re.sub callback: replace the matched byte with its escape sequence.
        return TTest.ESCAPEMAP[m.group(0)]
1232
1236
    @staticmethod
    def _stringescape(s):
        # Escape every non-printable byte in s via ESCAPESUB/ESCAPEMAP.
        return TTest.ESCAPESUB(TTest._escapef, s)
1236
1240
# Serializes console/stream output across the runner's worker threads.
iolock = threading.RLock()
1238
1242
class SkipTest(Exception):
    """Signals that the current test should be skipped."""
1241
1245
class IgnoreTest(Exception):
    """Signals that the current test should be ignored."""
1244
1248
class WarnTest(Exception):
    """Signals that the current test produced a warning."""
1247
1251
class ReportedTest(Exception):
    """Signals that the current test was already reported."""
1250
1254
1251 class TestResult(unittest._TextTestResult):
1255 class TestResult(unittest._TextTestResult):
1252 """Holds results when executing via unittest."""
1256 """Holds results when executing via unittest."""
1253 # Don't worry too much about accessing the non-public _TextTestResult.
1257 # Don't worry too much about accessing the non-public _TextTestResult.
1254 # It is relatively common in Python testing tools.
1258 # It is relatively common in Python testing tools.
    def __init__(self, options, *args, **kwargs):
        # options: the runner's parsed command-line options; remaining
        # arguments are forwarded to unittest's _TextTestResult.
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        # We have a custom "warned" result that isn't present in any Python
        # unittest implementation. It is very similar to failed. It may make
        # sense to map it into fail some day.
        self.warned = []

        # Per-test timing tuples, appended by stopTest().
        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}
1279
1283
1280 def addFailure(self, test, reason):
1284 def addFailure(self, test, reason):
1281 self.failures.append((test, reason))
1285 self.failures.append((test, reason))
1282
1286
1283 if self._options.first:
1287 if self._options.first:
1284 self.stop()
1288 self.stop()
1285 else:
1289 else:
1286 with iolock:
1290 with iolock:
1287 if reason == "timed out":
1291 if reason == "timed out":
1288 self.stream.write('t')
1292 self.stream.write('t')
1289 else:
1293 else:
1290 if not self._options.nodiff:
1294 if not self._options.nodiff:
1291 self.stream.write('\nERROR: %s output changed\n' % test)
1295 self.stream.write('\nERROR: %s output changed\n' % test)
1292 self.stream.write('!')
1296 self.stream.write('!')
1293
1297
1294 self.stream.flush()
1298 self.stream.flush()
1295
1299
    def addSuccess(self, test):
        # Record the pass; successes feeds the xunit report.
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)
1300
1304
    def addError(self, test, err):
        # err is the (exc_type, exc_value, traceback) triple unittest passes.
        super(TestResult, self).addError(test, err)
        # --first: abort the run on the first hard error.
        if self._options.first:
            self.stop()
1305
1309
1306 # Polyfill.
1310 # Polyfill.
    def addSkip(self, test, reason):
        # Polyfilled skip tracking (see __init__); prints either the full
        # reason (verbose) or a single 's' progress marker.
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
                self.stream.flush()
1315
1319
    def addIgnore(self, test, reason):
        # Custom "ignored" result (see __init__).
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    # Filtered-out tests still count toward the run total.
                    self.testsRun += 1
                self.stream.flush()
1327
1331
    def addWarn(self, test, reason):
        # Custom "warned" result (see __init__); '~' is the progress marker.
        self.warned.append((test, reason))

        if self._options.first:
            self.stop()

        with iolock:
            if self.showAll:
                self.stream.writeln('warned %s' % reason)
            else:
                self.stream.write('~')
                self.stream.flush()
1340
1344
    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test.

        Shows the diff (or spawns --view), optionally offers the
        --interactive accept prompt, and returns whether the new output
        was accepted.
        """
        if self.shouldStop:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        failed = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                # External diff viewer instead of inline output.
                v = self._options.view
                if PYTHON3:
                    v = _bytespath(v)
                os.system(b"%s %s %s" %
                          (v, test.refpath, test.errpath))
            else:
                servefail, lines = getdiff(expected, got,
                                           test.refpath, test.errpath)
                if servefail:
                    self.addFailure(
                        test,
                        'server failed to start (HGPORT=%s)' % test._startport)
                    raise ReportedTest('server failed to start')
                else:
                    self.stream.write('\n')
                    for line in lines:
                        if PYTHON3:
                            # Diff lines are bytes; write them through the
                            # underlying binary buffer on Python 3.
                            self.stream.flush()
                            self.stream.buffer.write(line)
                            self.stream.buffer.flush()
                        else:
                            self.stream.write(line)
                            self.stream.flush()

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                self.stream.write('Accept this change? [n] ')
                answer = sys.stdin.readline().strip()
                if answer.lower() in ('y', 'yes'):
                    if test.name.endswith('.t'):
                        rename(test.errpath, test.path)
                    else:
                        rename(test.errpath, '%s.out' % test.path)
                    accepted = True
            if not accepted and not failed:
                # Keep the raw diff for the xunit report.
                self.faildata[test.name] = b''.join(lines)

        return accepted
1395
1399
1396 def startTest(self, test):
1400 def startTest(self, test):
1397 super(TestResult, self).startTest(test)
1401 super(TestResult, self).startTest(test)
1398
1402
1399 # os.times module computes the user time and system time spent by
1403 # os.times module computes the user time and system time spent by
1400 # child's processes along with real elapsed time taken by a process.
1404 # child's processes along with real elapsed time taken by a process.
1401 # This module has one limitation. It can only work for Linux user
1405 # This module has one limitation. It can only work for Linux user
1402 # and not for Windows.
1406 # and not for Windows.
1403 test.started = os.times()
1407 test.started = os.times()
1404 if self._firststarttime is None: # thread racy but irrelevant
1408 if self._firststarttime is None: # thread racy but irrelevant
1405 self._firststarttime = test.started[4]
1409 self._firststarttime = test.started[4]
1406
1410
1407 def stopTest(self, test, interrupted=False):
1411 def stopTest(self, test, interrupted=False):
1408 super(TestResult, self).stopTest(test)
1412 super(TestResult, self).stopTest(test)
1409
1413
1410 test.stopped = os.times()
1414 test.stopped = os.times()
1411
1415
1412 starttime = test.started
1416 starttime = test.started
1413 endtime = test.stopped
1417 endtime = test.stopped
1414 origin = self._firststarttime
1418 origin = self._firststarttime
1415 self.times.append((test.name,
1419 self.times.append((test.name,
1416 endtime[2] - starttime[2], # user space CPU time
1420 endtime[2] - starttime[2], # user space CPU time
1417 endtime[3] - starttime[3], # sys space CPU time
1421 endtime[3] - starttime[3], # sys space CPU time
1418 endtime[4] - starttime[4], # real time
1422 endtime[4] - starttime[4], # real time
1419 starttime[4] - origin, # start date in run context
1423 starttime[4] - origin, # start date in run context
1420 endtime[4] - origin, # end date in run context
1424 endtime[4] - origin, # end date in run context
1421 ))
1425 ))
1422
1426
1423 if interrupted:
1427 if interrupted:
1424 with iolock:
1428 with iolock:
1425 self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
1429 self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
1426 test.name, self.times[-1][3]))
1430 test.name, self.times[-1][3]))
1427
1431
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                 retest=False, keywords=None, loop=False, runs_per_test=1,
                 loadtest=None, showchannels=False,
                 *args, **kwargs):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:
            # get() re-loads the test from disk when it is flagged
            # 'should_reload' (used by --loop style runs); otherwise it
            # hands back the already-constructed test object.
            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test.bname, num_tests[0])
                return test
            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            # Whitelisted tests bypass the blacklist/retest/keyword filters.
            if not (self._whitelist and test.name in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    f = open(test.path, 'rb')
                    t = f.read().lower() + test.bname.lower()
                    f.close()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        # One display slot per worker; each holds the remainder of the
        # test name still to be "ticked out" by the stat() thread.
        channels = [""] * self._jobs

        def job(test, result):
            # Claim the first free channel for progress display.
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except: # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            try:
                channels[channel] = ''
            except IndexError:
                pass

        def stat():
            # Progress ticker: emits one character per channel roughly
            # every second until run() clears `channels`.
            count = 0
            while channels:
                d = '\n%03s ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + ' ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                # Drain a completion slot when the pool is saturated (or
                # when nothing is left to start).
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(
                                self._loadtest(test.name, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(target=job, name=test.name,
                                             args=(test, result))
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        # Emptying `channels` tells the stat() thread to terminate.
        channels = []

        return result
# Save the most recent 5 wall-clock runtimes of each test to a
# human-readable text file named .testtimes. Tests are sorted
# alphabetically, while times for each test are listed from oldest to
# newest.
1616
1620
def loadtimes(testdir):
    """Return the saved per-test runtimes recorded under ``testdir``.

    Reads the ``.testtimes`` file maintained by ``savetimes()`` and returns
    a list of ``(testname, [runtime, ...])`` pairs, runtimes oldest first.
    A missing file yields an empty list; any other I/O error propagates.
    """
    times = []
    try:
        # savetimes() writes '.testtimes'; the previous '.testtimes-'
        # spelling (trailing dash) never matched that file, so saved
        # times were silently lost on every load.
        with open(os.path.join(testdir, '.testtimes')) as fp:
            for line in fp:
                # Each line is "<testname> <t1> <t2> ...".
                ts = line.split()
                times.append((ts[0], [float(t) for t in ts[1:]]))
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times
def savetimes(testdir, result):
    """Persist recent wall-clock runtimes of each test to ``.testtimes``.

    Merges this run's real-time measurements (``result.times``) into the
    timings previously saved in ``testdir``, keeping at most the 5 most
    recent runtimes per test (oldest first). Skipped tests keep their old
    entries. The file is replaced via a temp file + rename; failure to
    install the new file is silently ignored (best effort).
    """
    saved = dict(loadtimes(testdir))
    maxruns = 5
    skipped = set([str(t[0]) for t in result.skipped])
    for tdata in result.times:
        # tdata is (name, cuser, csys, real, ...); index 3 is real time.
        test, real = tdata[0], tdata[3]
        if test not in skipped:
            ts = saved.setdefault(test, [])
            ts.append(real)
            ts[:] = ts[-maxruns:]

    fd, tmpname = tempfile.mkstemp(prefix='.testtimes',
                                   dir=testdir, text=True)
    with os.fdopen(fd, 'w') as fp:
        # items() instead of the Python-2-only iteritems() so this also
        # runs under Python 3, which the rest of this file supports.
        for name, ts in sorted(saved.items()):
            fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
    timepath = os.path.join(testdir, '.testtimes')
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass
class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings."""

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        self._runner = runner

    def run(self, test):
        """Execute *test*, then print the summary and optional reports.

        Emits skipped/warned/failed/errored lines, optionally writes an
        xunit XML file and/or a JSON report (per runner options), saves
        per-test timings, and returns the populated TestResult.
        """
        result = TestResult(self._runner.options, self.stream,
                            self.descriptions, self.verbosity)

        test(result)

        failed = len(result.failures)
        warned = len(result.warned)
        skipped = len(result.skipped)
        ignored = len(result.ignored)

        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in result.skipped:
                    self.stream.writeln('Skipped %s: %s' % (test.name, msg))
            for test, msg in result.warned:
                self.stream.writeln('Warned %s: %s' % (test.name, msg))
            for test, msg in result.failures:
                self.stream.writeln('Failed %s: %s' % (test.name, msg))
            for test, msg in result.errors:
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                with open(self._runner.options.xunit, 'wb') as xuf:
                    timesd = dict((t[0], t[3]) for t in result.times)
                    doc = minidom.Document()
                    s = doc.createElement('testsuite')
                    s.setAttribute('name', 'run-tests')
                    s.setAttribute('tests', str(result.testsRun))
                    s.setAttribute('errors', "0") # TODO
                    s.setAttribute('failures', str(failed))
                    s.setAttribute('skipped', str(skipped + ignored))
                    doc.appendChild(s)
                    for tc in result.successes:
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc.name)
                        t.setAttribute('time', '%.3f' % timesd[tc.name])
                        s.appendChild(t)
                    for tc, err in sorted(result.faildata.items()):
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc)
                        t.setAttribute('time', '%.3f' % timesd[tc])
                        # createCDATASection expects a unicode or it will
                        # convert using default conversion rules, which will
                        # fail if string isn't ASCII.
                        err = cdatasafe(err).decode('utf-8', 'replace')
                        cd = doc.createCDATASection(err)
                        t.appendChild(cd)
                        s.appendChild(t)
                    xuf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))

            if self._runner.options.json:
                if json is None:
                    raise ImportError("json module not installed")
                jsonpath = os.path.join(self._runner._testdir, 'report.json')
                with open(jsonpath, 'w') as fp:
                    timesd = {}
                    for tdata in result.times:
                        test = tdata[0]
                        timesd[test] = tdata[1:]

                    outcome = {}
                    groups = [('success', ((tc, None)
                               for tc in result.successes)),
                              ('failure', result.failures),
                              ('skip', result.skipped)]
                    for res, testcases in groups:
                        for tc, __ in testcases:
                            if tc.name in timesd:
                                tres = {'result': res,
                                        'time': ('%0.3f' % timesd[tc.name][2]),
                                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                                        'csys': ('%0.3f' % timesd[tc.name][1]),
                                        'start': ('%0.3f' % timesd[tc.name][3]),
                                        'end': ('%0.3f' % timesd[tc.name][4]),
                                        'diff': result.faildata.get(tc.name,
                                                                    ''),
                                        }
                            else:
                                # blacklisted test
                                tres = {'result': res}

                            outcome[tc.name] = tres
                    jsonout = json.dumps(outcome, sort_keys=True, indent=4)
                    fp.writelines(("testreport =", jsonout))

            self._runner._checkhglib('Tested')

            savetimes(self._runner._testdir, result)
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d warned, %d failed.'
                % (result.testsRun,
                   skipped + ignored, warned, failed))
            if failed:
                self.stream.writeln('python hash seed: %s' %
                                    os.environ['PYTHONHASHSEED'])
            if self._runner.options.time:
                self.printtimes(result.times)

        return result

    def printtimes(self, times):
        """Print a per-test timing table, sorted by real elapsed time."""
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s'
        self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' %
                            ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))
1778 class TestRunner(object):
1782 class TestRunner(object):
1779 """Holds context for executing tests.
1783 """Holds context for executing tests.
1780
1784
1781 Tests rely on a lot of state. This object holds it for them.
1785 Tests rely on a lot of state. This object holds it for them.
1782 """
1786 """
1783
1787
1784 # Programs required to run tests.
1788 # Programs required to run tests.
1785 REQUIREDTOOLS = [
1789 REQUIREDTOOLS = [
1786 os.path.basename(_bytespath(sys.executable)),
1790 os.path.basename(_bytespath(sys.executable)),
1787 b'diff',
1791 b'diff',
1788 b'grep',
1792 b'grep',
1789 b'unzip',
1793 b'unzip',
1790 b'gunzip',
1794 b'gunzip',
1791 b'bunzip2',
1795 b'bunzip2',
1792 b'sed',
1796 b'sed',
1793 ]
1797 ]
1794
1798
1795 # Maps file extensions to test class.
1799 # Maps file extensions to test class.
1796 TESTTYPES = [
1800 TESTTYPES = [
1797 (b'.py', PythonTest),
1801 (b'.py', PythonTest),
1798 (b'.t', TTest),
1802 (b'.t', TTest),
1799 ]
1803 ]
1800
1804
1801 def __init__(self):
1805 def __init__(self):
1802 self.options = None
1806 self.options = None
1803 self._hgroot = None
1807 self._hgroot = None
1804 self._testdir = None
1808 self._testdir = None
1805 self._hgtmp = None
1809 self._hgtmp = None
1806 self._installdir = None
1810 self._installdir = None
1807 self._bindir = None
1811 self._bindir = None
1808 self._tmpbinddir = None
1812 self._tmpbinddir = None
1809 self._pythondir = None
1813 self._pythondir = None
1810 self._coveragefile = None
1814 self._coveragefile = None
1811 self._createdfiles = []
1815 self._createdfiles = []
1812 self._hgpath = None
1816 self._hgpath = None
1813 self._portoffset = 0
1817 self._portoffset = 0
1814 self._ports = {}
1818 self._ports = {}
1815
1819
1816 def run(self, args, parser=None):
1820 def run(self, args, parser=None):
1817 """Run the test suite."""
1821 """Run the test suite."""
1818 oldmask = os.umask(0o22)
1822 oldmask = os.umask(0o22)
1819 try:
1823 try:
1820 parser = parser or getparser()
1824 parser = parser or getparser()
1821 options, args = parseargs(args, parser)
1825 options, args = parseargs(args, parser)
1822 # positional arguments are paths to test files to run, so
1826 # positional arguments are paths to test files to run, so
1823 # we make sure they're all bytestrings
1827 # we make sure they're all bytestrings
1824 args = [_bytespath(a) for a in args]
1828 args = [_bytespath(a) for a in args]
1825 self.options = options
1829 self.options = options
1826
1830
1827 self._checktools()
1831 self._checktools()
1828 tests = self.findtests(args)
1832 tests = self.findtests(args)
1829 if options.profile_runner:
1833 if options.profile_runner:
1830 import statprof
1834 import statprof
1831 statprof.start()
1835 statprof.start()
1832 result = self._run(tests)
1836 result = self._run(tests)
1833 if options.profile_runner:
1837 if options.profile_runner:
1834 statprof.stop()
1838 statprof.stop()
1835 statprof.display()
1839 statprof.display()
1836 return result
1840 return result
1837
1841
1838 finally:
1842 finally:
1839 os.umask(oldmask)
1843 os.umask(oldmask)
1840
1844
1841 def _run(self, tests):
1845 def _run(self, tests):
1842 if self.options.random:
1846 if self.options.random:
1843 random.shuffle(tests)
1847 random.shuffle(tests)
1844 else:
1848 else:
1845 # keywords for slow tests
1849 # keywords for slow tests
1846 slow = {b'svn': 10,
1850 slow = {b'svn': 10,
1847 b'cvs': 10,
1851 b'cvs': 10,
1848 b'hghave': 10,
1852 b'hghave': 10,
1849 b'largefiles-update': 10,
1853 b'largefiles-update': 10,
1850 b'run-tests': 10,
1854 b'run-tests': 10,
1851 b'corruption': 10,
1855 b'corruption': 10,
1852 b'race': 10,
1856 b'race': 10,
1853 b'i18n': 10,
1857 b'i18n': 10,
1854 b'check': 100,
1858 b'check': 100,
1855 b'gendoc': 100,
1859 b'gendoc': 100,
1856 b'contrib-perf': 200,
1860 b'contrib-perf': 200,
1857 }
1861 }
1858 perf = {}
1862 perf = {}
1859 def sortkey(f):
1863 def sortkey(f):
1860 # run largest tests first, as they tend to take the longest
1864 # run largest tests first, as they tend to take the longest
1861 try:
1865 try:
1862 return perf[f]
1866 return perf[f]
1863 except KeyError:
1867 except KeyError:
1864 try:
1868 try:
1865 val = -os.stat(f).st_size
1869 val = -os.stat(f).st_size
1866 except OSError as e:
1870 except OSError as e:
1867 if e.errno != errno.ENOENT:
1871 if e.errno != errno.ENOENT:
1868 raise
1872 raise
1869 perf[f] = -1e9 # file does not exist, tell early
1873 perf[f] = -1e9 # file does not exist, tell early
1870 return -1e9
1874 return -1e9
1871 for kw, mul in slow.items():
1875 for kw, mul in slow.items():
1872 if kw in f:
1876 if kw in f:
1873 val *= mul
1877 val *= mul
1874 if f.endswith('.py'):
1878 if f.endswith('.py'):
1875 val /= 10.0
1879 val /= 10.0
1876 perf[f] = val / 1000.0
1880 perf[f] = val / 1000.0
1877 return perf[f]
1881 return perf[f]
1878 tests.sort(key=sortkey)
1882 tests.sort(key=sortkey)
1879
1883
1880 self._testdir = osenvironb[b'TESTDIR'] = getattr(
1884 self._testdir = osenvironb[b'TESTDIR'] = getattr(
1881 os, 'getcwdb', os.getcwd)()
1885 os, 'getcwdb', os.getcwd)()
1882
1886
1883 if 'PYTHONHASHSEED' not in os.environ:
1887 if 'PYTHONHASHSEED' not in os.environ:
1884 # use a random python hash seed all the time
1888 # use a random python hash seed all the time
1885 # we do the randomness ourself to know what seed is used
1889 # we do the randomness ourself to know what seed is used
1886 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
1890 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
1887
1891
1888 if self.options.tmpdir:
1892 if self.options.tmpdir:
1889 self.options.keep_tmpdir = True
1893 self.options.keep_tmpdir = True
1890 tmpdir = _bytespath(self.options.tmpdir)
1894 tmpdir = _bytespath(self.options.tmpdir)
1891 if os.path.exists(tmpdir):
1895 if os.path.exists(tmpdir):
1892 # Meaning of tmpdir has changed since 1.3: we used to create
1896 # Meaning of tmpdir has changed since 1.3: we used to create
1893 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
1897 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
1894 # tmpdir already exists.
1898 # tmpdir already exists.
1895 print("error: temp dir %r already exists" % tmpdir)
1899 print("error: temp dir %r already exists" % tmpdir)
1896 return 1
1900 return 1
1897
1901
1898 # Automatically removing tmpdir sounds convenient, but could
1902 # Automatically removing tmpdir sounds convenient, but could
1899 # really annoy anyone in the habit of using "--tmpdir=/tmp"
1903 # really annoy anyone in the habit of using "--tmpdir=/tmp"
1900 # or "--tmpdir=$HOME".
1904 # or "--tmpdir=$HOME".
1901 #vlog("# Removing temp dir", tmpdir)
1905 #vlog("# Removing temp dir", tmpdir)
1902 #shutil.rmtree(tmpdir)
1906 #shutil.rmtree(tmpdir)
1903 os.makedirs(tmpdir)
1907 os.makedirs(tmpdir)
1904 else:
1908 else:
1905 d = None
1909 d = None
1906 if os.name == 'nt':
1910 if os.name == 'nt':
1907 # without this, we get the default temp dir location, but
1911 # without this, we get the default temp dir location, but
1908 # in all lowercase, which causes troubles with paths (issue3490)
1912 # in all lowercase, which causes troubles with paths (issue3490)
1909 d = osenvironb.get(b'TMP', None)
1913 d = osenvironb.get(b'TMP', None)
1910 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
1914 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
1911
1915
1912 self._hgtmp = osenvironb[b'HGTMP'] = (
1916 self._hgtmp = osenvironb[b'HGTMP'] = (
1913 os.path.realpath(tmpdir))
1917 os.path.realpath(tmpdir))
1914
1918
1915 if self.options.with_hg:
1919 if self.options.with_hg:
1916 self._installdir = None
1920 self._installdir = None
1917 whg = self.options.with_hg
1921 whg = self.options.with_hg
1918 # If --with-hg is not specified, we have bytes already,
1922 # If --with-hg is not specified, we have bytes already,
1919 # but if it was specified in python3 we get a str, so we
1923 # but if it was specified in python3 we get a str, so we
1920 # have to encode it back into a bytes.
1924 # have to encode it back into a bytes.
1921 if PYTHON3:
1925 if PYTHON3:
1922 if not isinstance(whg, bytes):
1926 if not isinstance(whg, bytes):
1923 whg = _bytespath(whg)
1927 whg = _bytespath(whg)
1924 self._bindir = os.path.dirname(os.path.realpath(whg))
1928 self._bindir = os.path.dirname(os.path.realpath(whg))
1925 assert isinstance(self._bindir, bytes)
1929 assert isinstance(self._bindir, bytes)
1926 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
1930 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
1927 os.makedirs(self._tmpbindir)
1931 os.makedirs(self._tmpbindir)
1928
1932
1929 # This looks redundant with how Python initializes sys.path from
1933 # This looks redundant with how Python initializes sys.path from
1930 # the location of the script being executed. Needed because the
1934 # the location of the script being executed. Needed because the
1931 # "hg" specified by --with-hg is not the only Python script
1935 # "hg" specified by --with-hg is not the only Python script
1932 # executed in the test suite that needs to import 'mercurial'
1936 # executed in the test suite that needs to import 'mercurial'
1933 # ... which means it's not really redundant at all.
1937 # ... which means it's not really redundant at all.
1934 self._pythondir = self._bindir
1938 self._pythondir = self._bindir
1935 else:
1939 else:
1936 self._installdir = os.path.join(self._hgtmp, b"install")
1940 self._installdir = os.path.join(self._hgtmp, b"install")
1937 self._bindir = osenvironb[b"BINDIR"] = \
1941 self._bindir = osenvironb[b"BINDIR"] = \
1938 os.path.join(self._installdir, b"bin")
1942 os.path.join(self._installdir, b"bin")
1939 self._tmpbindir = self._bindir
1943 self._tmpbindir = self._bindir
1940 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
1944 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
1941
1945
1942 osenvironb[b"BINDIR"] = self._bindir
1946 osenvironb[b"BINDIR"] = self._bindir
1943 osenvironb[b"PYTHON"] = PYTHON
1947 osenvironb[b"PYTHON"] = PYTHON
1944
1948
1945 fileb = _bytespath(__file__)
1949 fileb = _bytespath(__file__)
1946 runtestdir = os.path.abspath(os.path.dirname(fileb))
1950 runtestdir = os.path.abspath(os.path.dirname(fileb))
1947 osenvironb[b'RUNTESTDIR'] = runtestdir
1951 osenvironb[b'RUNTESTDIR'] = runtestdir
1948 if PYTHON3:
1952 if PYTHON3:
1949 sepb = _bytespath(os.pathsep)
1953 sepb = _bytespath(os.pathsep)
1950 else:
1954 else:
1951 sepb = os.pathsep
1955 sepb = os.pathsep
1952 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
1956 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
1953 if os.path.islink(__file__):
1957 if os.path.islink(__file__):
1954 # test helper will likely be at the end of the symlink
1958 # test helper will likely be at the end of the symlink
1955 realfile = os.path.realpath(fileb)
1959 realfile = os.path.realpath(fileb)
1956 realdir = os.path.abspath(os.path.dirname(realfile))
1960 realdir = os.path.abspath(os.path.dirname(realfile))
1957 path.insert(2, realdir)
1961 path.insert(2, realdir)
1958 if self._testdir != runtestdir:
1962 if self._testdir != runtestdir:
1959 path = [self._testdir] + path
1963 path = [self._testdir] + path
1960 if self._tmpbindir != self._bindir:
1964 if self._tmpbindir != self._bindir:
1961 path = [self._tmpbindir] + path
1965 path = [self._tmpbindir] + path
1962 osenvironb[b"PATH"] = sepb.join(path)
1966 osenvironb[b"PATH"] = sepb.join(path)
1963
1967
1964 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
1968 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
1965 # can run .../tests/run-tests.py test-foo where test-foo
1969 # can run .../tests/run-tests.py test-foo where test-foo
1966 # adds an extension to HGRC. Also include run-test.py directory to
1970 # adds an extension to HGRC. Also include run-test.py directory to
1967 # import modules like heredoctest.
1971 # import modules like heredoctest.
1968 pypath = [self._pythondir, self._testdir, runtestdir]
1972 pypath = [self._pythondir, self._testdir, runtestdir]
1969 # We have to augment PYTHONPATH, rather than simply replacing
1973 # We have to augment PYTHONPATH, rather than simply replacing
1970 # it, in case external libraries are only available via current
1974 # it, in case external libraries are only available via current
1971 # PYTHONPATH. (In particular, the Subversion bindings on OS X
1975 # PYTHONPATH. (In particular, the Subversion bindings on OS X
1972 # are in /opt/subversion.)
1976 # are in /opt/subversion.)
1973 oldpypath = osenvironb.get(IMPL_PATH)
1977 oldpypath = osenvironb.get(IMPL_PATH)
1974 if oldpypath:
1978 if oldpypath:
1975 pypath.append(oldpypath)
1979 pypath.append(oldpypath)
1976 osenvironb[IMPL_PATH] = sepb.join(pypath)
1980 osenvironb[IMPL_PATH] = sepb.join(pypath)
1977
1981
1978 if self.options.pure:
1982 if self.options.pure:
1979 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
1983 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
1980
1984
1981 if self.options.allow_slow_tests:
1985 if self.options.allow_slow_tests:
1982 os.environ["HGTEST_SLOW"] = "slow"
1986 os.environ["HGTEST_SLOW"] = "slow"
1983 elif 'HGTEST_SLOW' in os.environ:
1987 elif 'HGTEST_SLOW' in os.environ:
1984 del os.environ['HGTEST_SLOW']
1988 del os.environ['HGTEST_SLOW']
1985
1989
1986 self._coveragefile = os.path.join(self._testdir, b'.coverage')
1990 self._coveragefile = os.path.join(self._testdir, b'.coverage')
1987
1991
1988 vlog("# Using TESTDIR", self._testdir)
1992 vlog("# Using TESTDIR", self._testdir)
1989 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
1993 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
1990 vlog("# Using HGTMP", self._hgtmp)
1994 vlog("# Using HGTMP", self._hgtmp)
1991 vlog("# Using PATH", os.environ["PATH"])
1995 vlog("# Using PATH", os.environ["PATH"])
1992 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
1996 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
1993
1997
1994 try:
1998 try:
1995 return self._runtests(tests) or 0
1999 return self._runtests(tests) or 0
1996 finally:
2000 finally:
1997 time.sleep(.1)
2001 time.sleep(.1)
1998 self._cleanup()
2002 self._cleanup()
1999
2003
2000 def findtests(self, args):
2004 def findtests(self, args):
2001 """Finds possible test files from arguments.
2005 """Finds possible test files from arguments.
2002
2006
2003 If you wish to inject custom tests into the test harness, this would
2007 If you wish to inject custom tests into the test harness, this would
2004 be a good function to monkeypatch or override in a derived class.
2008 be a good function to monkeypatch or override in a derived class.
2005 """
2009 """
2006 if not args:
2010 if not args:
2007 if self.options.changed:
2011 if self.options.changed:
2008 proc = Popen4('hg st --rev "%s" -man0 .' %
2012 proc = Popen4('hg st --rev "%s" -man0 .' %
2009 self.options.changed, None, 0)
2013 self.options.changed, None, 0)
2010 stdout, stderr = proc.communicate()
2014 stdout, stderr = proc.communicate()
2011 args = stdout.strip(b'\0').split(b'\0')
2015 args = stdout.strip(b'\0').split(b'\0')
2012 else:
2016 else:
2013 args = os.listdir(b'.')
2017 args = os.listdir(b'.')
2014
2018
2015 return [t for t in args
2019 return [t for t in args
2016 if os.path.basename(t).startswith(b'test-')
2020 if os.path.basename(t).startswith(b'test-')
2017 and (t.endswith(b'.py') or t.endswith(b'.t'))]
2021 and (t.endswith(b'.py') or t.endswith(b'.t'))]
2018
2022
2019 def _runtests(self, tests):
2023 def _runtests(self, tests):
2020 try:
2024 try:
2021 if self._installdir:
2025 if self._installdir:
2022 self._installhg()
2026 self._installhg()
2023 self._checkhglib("Testing")
2027 self._checkhglib("Testing")
2024 else:
2028 else:
2025 self._usecorrectpython()
2029 self._usecorrectpython()
2026
2030
2027 if self.options.restart:
2031 if self.options.restart:
2028 orig = list(tests)
2032 orig = list(tests)
2029 while tests:
2033 while tests:
2030 if os.path.exists(tests[0] + ".err"):
2034 if os.path.exists(tests[0] + ".err"):
2031 break
2035 break
2032 tests.pop(0)
2036 tests.pop(0)
2033 if not tests:
2037 if not tests:
2034 print("running all tests")
2038 print("running all tests")
2035 tests = orig
2039 tests = orig
2036
2040
2037 tests = [self._gettest(t, i) for i, t in enumerate(tests)]
2041 tests = [self._gettest(t, i) for i, t in enumerate(tests)]
2038
2042
2039 failed = False
2043 failed = False
2040 warned = False
2044 warned = False
2041 kws = self.options.keywords
2045 kws = self.options.keywords
2042 if kws is not None and PYTHON3:
2046 if kws is not None and PYTHON3:
2043 kws = kws.encode('utf-8')
2047 kws = kws.encode('utf-8')
2044
2048
2045 suite = TestSuite(self._testdir,
2049 suite = TestSuite(self._testdir,
2046 jobs=self.options.jobs,
2050 jobs=self.options.jobs,
2047 whitelist=self.options.whitelisted,
2051 whitelist=self.options.whitelisted,
2048 blacklist=self.options.blacklist,
2052 blacklist=self.options.blacklist,
2049 retest=self.options.retest,
2053 retest=self.options.retest,
2050 keywords=kws,
2054 keywords=kws,
2051 loop=self.options.loop,
2055 loop=self.options.loop,
2052 runs_per_test=self.options.runs_per_test,
2056 runs_per_test=self.options.runs_per_test,
2053 showchannels=self.options.showchannels,
2057 showchannels=self.options.showchannels,
2054 tests=tests, loadtest=self._gettest)
2058 tests=tests, loadtest=self._gettest)
2055 verbosity = 1
2059 verbosity = 1
2056 if self.options.verbose:
2060 if self.options.verbose:
2057 verbosity = 2
2061 verbosity = 2
2058 runner = TextTestRunner(self, verbosity=verbosity)
2062 runner = TextTestRunner(self, verbosity=verbosity)
2059 result = runner.run(suite)
2063 result = runner.run(suite)
2060
2064
2061 if result.failures:
2065 if result.failures:
2062 failed = True
2066 failed = True
2063 if result.warned:
2067 if result.warned:
2064 warned = True
2068 warned = True
2065
2069
2066 if self.options.anycoverage:
2070 if self.options.anycoverage:
2067 self._outputcoverage()
2071 self._outputcoverage()
2068 except KeyboardInterrupt:
2072 except KeyboardInterrupt:
2069 failed = True
2073 failed = True
2070 print("\ninterrupted!")
2074 print("\ninterrupted!")
2071
2075
2072 if failed:
2076 if failed:
2073 return 1
2077 return 1
2074 if warned:
2078 if warned:
2075 return 80
2079 return 80
2076
2080
2077 def _getport(self, count):
2081 def _getport(self, count):
2078 port = self._ports.get(count) # do we have a cached entry?
2082 port = self._ports.get(count) # do we have a cached entry?
2079 if port is None:
2083 if port is None:
2080 portneeded = 3
2084 portneeded = 3
2081 # above 100 tries we just give up and let test reports failure
2085 # above 100 tries we just give up and let test reports failure
2082 for tries in xrange(100):
2086 for tries in xrange(100):
2083 allfree = True
2087 allfree = True
2084 port = self.options.port + self._portoffset
2088 port = self.options.port + self._portoffset
2085 for idx in xrange(portneeded):
2089 for idx in xrange(portneeded):
2086 if not checkportisavailable(port + idx):
2090 if not checkportisavailable(port + idx):
2087 allfree = False
2091 allfree = False
2088 break
2092 break
2089 self._portoffset += portneeded
2093 self._portoffset += portneeded
2090 if allfree:
2094 if allfree:
2091 break
2095 break
2092 self._ports[count] = port
2096 self._ports[count] = port
2093 return port
2097 return port
2094
2098
2095 def _gettest(self, test, count):
2099 def _gettest(self, test, count):
2096 """Obtain a Test by looking at its filename.
2100 """Obtain a Test by looking at its filename.
2097
2101
2098 Returns a Test instance. The Test may not be runnable if it doesn't
2102 Returns a Test instance. The Test may not be runnable if it doesn't
2099 map to a known type.
2103 map to a known type.
2100 """
2104 """
2101 lctest = test.lower()
2105 lctest = test.lower()
2102 testcls = Test
2106 testcls = Test
2103
2107
2104 for ext, cls in self.TESTTYPES:
2108 for ext, cls in self.TESTTYPES:
2105 if lctest.endswith(ext):
2109 if lctest.endswith(ext):
2106 testcls = cls
2110 testcls = cls
2107 break
2111 break
2108
2112
2109 refpath = os.path.join(self._testdir, test)
2113 refpath = os.path.join(self._testdir, test)
2110 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2114 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2111
2115
2112 t = testcls(refpath, tmpdir,
2116 t = testcls(refpath, tmpdir,
2113 keeptmpdir=self.options.keep_tmpdir,
2117 keeptmpdir=self.options.keep_tmpdir,
2114 debug=self.options.debug,
2118 debug=self.options.debug,
2115 timeout=self.options.timeout,
2119 timeout=self.options.timeout,
2116 startport=self._getport(count),
2120 startport=self._getport(count),
2117 extraconfigopts=self.options.extra_config_opt,
2121 extraconfigopts=self.options.extra_config_opt,
2118 py3kwarnings=self.options.py3k_warnings,
2122 py3kwarnings=self.options.py3k_warnings,
2119 shell=self.options.shell)
2123 shell=self.options.shell)
2120 t.should_reload = True
2124 t.should_reload = True
2121 return t
2125 return t
2122
2126
2123 def _cleanup(self):
2127 def _cleanup(self):
2124 """Clean up state from this test invocation."""
2128 """Clean up state from this test invocation."""
2125
2129
2126 if self.options.keep_tmpdir:
2130 if self.options.keep_tmpdir:
2127 return
2131 return
2128
2132
2129 vlog("# Cleaning up HGTMP", self._hgtmp)
2133 vlog("# Cleaning up HGTMP", self._hgtmp)
2130 shutil.rmtree(self._hgtmp, True)
2134 shutil.rmtree(self._hgtmp, True)
2131 for f in self._createdfiles:
2135 for f in self._createdfiles:
2132 try:
2136 try:
2133 os.remove(f)
2137 os.remove(f)
2134 except OSError:
2138 except OSError:
2135 pass
2139 pass
2136
2140
2137 def _usecorrectpython(self):
2141 def _usecorrectpython(self):
2138 """Configure the environment to use the appropriate Python in tests."""
2142 """Configure the environment to use the appropriate Python in tests."""
2139 # Tests must use the same interpreter as us or bad things will happen.
2143 # Tests must use the same interpreter as us or bad things will happen.
2140 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2144 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2141 if getattr(os, 'symlink', None):
2145 if getattr(os, 'symlink', None):
2142 vlog("# Making python executable in test path a symlink to '%s'" %
2146 vlog("# Making python executable in test path a symlink to '%s'" %
2143 sys.executable)
2147 sys.executable)
2144 mypython = os.path.join(self._tmpbindir, pyexename)
2148 mypython = os.path.join(self._tmpbindir, pyexename)
2145 try:
2149 try:
2146 if os.readlink(mypython) == sys.executable:
2150 if os.readlink(mypython) == sys.executable:
2147 return
2151 return
2148 os.unlink(mypython)
2152 os.unlink(mypython)
2149 except OSError as err:
2153 except OSError as err:
2150 if err.errno != errno.ENOENT:
2154 if err.errno != errno.ENOENT:
2151 raise
2155 raise
2152 if self._findprogram(pyexename) != sys.executable:
2156 if self._findprogram(pyexename) != sys.executable:
2153 try:
2157 try:
2154 os.symlink(sys.executable, mypython)
2158 os.symlink(sys.executable, mypython)
2155 self._createdfiles.append(mypython)
2159 self._createdfiles.append(mypython)
2156 except OSError as err:
2160 except OSError as err:
2157 # child processes may race, which is harmless
2161 # child processes may race, which is harmless
2158 if err.errno != errno.EEXIST:
2162 if err.errno != errno.EEXIST:
2159 raise
2163 raise
2160 else:
2164 else:
2161 exedir, exename = os.path.split(sys.executable)
2165 exedir, exename = os.path.split(sys.executable)
2162 vlog("# Modifying search path to find %s as %s in '%s'" %
2166 vlog("# Modifying search path to find %s as %s in '%s'" %
2163 (exename, pyexename, exedir))
2167 (exename, pyexename, exedir))
2164 path = os.environ['PATH'].split(os.pathsep)
2168 path = os.environ['PATH'].split(os.pathsep)
2165 while exedir in path:
2169 while exedir in path:
2166 path.remove(exedir)
2170 path.remove(exedir)
2167 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2171 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2168 if not self._findprogram(pyexename):
2172 if not self._findprogram(pyexename):
2169 print("WARNING: Cannot find %s in search path" % pyexename)
2173 print("WARNING: Cannot find %s in search path" % pyexename)
2170
2174
2171 def _installhg(self):
2175 def _installhg(self):
2172 """Install hg into the test environment.
2176 """Install hg into the test environment.
2173
2177
2174 This will also configure hg with the appropriate testing settings.
2178 This will also configure hg with the appropriate testing settings.
2175 """
2179 """
2176 vlog("# Performing temporary installation of HG")
2180 vlog("# Performing temporary installation of HG")
2177 installerrs = os.path.join(b"tests", b"install.err")
2181 installerrs = os.path.join(b"tests", b"install.err")
2178 compiler = ''
2182 compiler = ''
2179 if self.options.compiler:
2183 if self.options.compiler:
2180 compiler = '--compiler ' + self.options.compiler
2184 compiler = '--compiler ' + self.options.compiler
2181 if self.options.pure:
2185 if self.options.pure:
2182 pure = b"--pure"
2186 pure = b"--pure"
2183 else:
2187 else:
2184 pure = b""
2188 pure = b""
2185 py3 = ''
2189 py3 = ''
2186
2190
2187 # Run installer in hg root
2191 # Run installer in hg root
2188 script = os.path.realpath(sys.argv[0])
2192 script = os.path.realpath(sys.argv[0])
2189 exe = sys.executable
2193 exe = sys.executable
2190 if PYTHON3:
2194 if PYTHON3:
2191 py3 = b'--c2to3'
2195 py3 = b'--c2to3'
2192 compiler = _bytespath(compiler)
2196 compiler = _bytespath(compiler)
2193 script = _bytespath(script)
2197 script = _bytespath(script)
2194 exe = _bytespath(exe)
2198 exe = _bytespath(exe)
2195 hgroot = os.path.dirname(os.path.dirname(script))
2199 hgroot = os.path.dirname(os.path.dirname(script))
2196 self._hgroot = hgroot
2200 self._hgroot = hgroot
2197 os.chdir(hgroot)
2201 os.chdir(hgroot)
2198 nohome = b'--home=""'
2202 nohome = b'--home=""'
2199 if os.name == 'nt':
2203 if os.name == 'nt':
2200 # The --home="" trick works only on OS where os.sep == '/'
2204 # The --home="" trick works only on OS where os.sep == '/'
2201 # because of a distutils convert_path() fast-path. Avoid it at
2205 # because of a distutils convert_path() fast-path. Avoid it at
2202 # least on Windows for now, deal with .pydistutils.cfg bugs
2206 # least on Windows for now, deal with .pydistutils.cfg bugs
2203 # when they happen.
2207 # when they happen.
2204 nohome = b''
2208 nohome = b''
2205 cmd = (b'%(exe)s setup.py %(py3)s %(pure)s clean --all'
2209 cmd = (b'%(exe)s setup.py %(py3)s %(pure)s clean --all'
2206 b' build %(compiler)s --build-base="%(base)s"'
2210 b' build %(compiler)s --build-base="%(base)s"'
2207 b' install --force --prefix="%(prefix)s"'
2211 b' install --force --prefix="%(prefix)s"'
2208 b' --install-lib="%(libdir)s"'
2212 b' --install-lib="%(libdir)s"'
2209 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2213 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2210 % {b'exe': exe, b'py3': py3, b'pure': pure,
2214 % {b'exe': exe, b'py3': py3, b'pure': pure,
2211 b'compiler': compiler,
2215 b'compiler': compiler,
2212 b'base': os.path.join(self._hgtmp, b"build"),
2216 b'base': os.path.join(self._hgtmp, b"build"),
2213 b'prefix': self._installdir, b'libdir': self._pythondir,
2217 b'prefix': self._installdir, b'libdir': self._pythondir,
2214 b'bindir': self._bindir,
2218 b'bindir': self._bindir,
2215 b'nohome': nohome, b'logfile': installerrs})
2219 b'nohome': nohome, b'logfile': installerrs})
2216
2220
2217 # setuptools requires install directories to exist.
2221 # setuptools requires install directories to exist.
2218 def makedirs(p):
2222 def makedirs(p):
2219 try:
2223 try:
2220 os.makedirs(p)
2224 os.makedirs(p)
2221 except OSError as e:
2225 except OSError as e:
2222 if e.errno != errno.EEXIST:
2226 if e.errno != errno.EEXIST:
2223 raise
2227 raise
2224 makedirs(self._pythondir)
2228 makedirs(self._pythondir)
2225 makedirs(self._bindir)
2229 makedirs(self._bindir)
2226
2230
2227 vlog("# Running", cmd)
2231 vlog("# Running", cmd)
2228 if os.system(cmd) == 0:
2232 if os.system(cmd) == 0:
2229 if not self.options.verbose:
2233 if not self.options.verbose:
2230 try:
2234 try:
2231 os.remove(installerrs)
2235 os.remove(installerrs)
2232 except OSError as e:
2236 except OSError as e:
2233 if e.errno != errno.ENOENT:
2237 if e.errno != errno.ENOENT:
2234 raise
2238 raise
2235 else:
2239 else:
2236 f = open(installerrs, 'rb')
2240 f = open(installerrs, 'rb')
2237 for line in f:
2241 for line in f:
2238 if PYTHON3:
2242 if PYTHON3:
2239 sys.stdout.buffer.write(line)
2243 sys.stdout.buffer.write(line)
2240 else:
2244 else:
2241 sys.stdout.write(line)
2245 sys.stdout.write(line)
2242 f.close()
2246 f.close()
2243 sys.exit(1)
2247 sys.exit(1)
2244 os.chdir(self._testdir)
2248 os.chdir(self._testdir)
2245
2249
2246 self._usecorrectpython()
2250 self._usecorrectpython()
2247
2251
2248 if self.options.py3k_warnings and not self.options.anycoverage:
2252 if self.options.py3k_warnings and not self.options.anycoverage:
2249 vlog("# Updating hg command to enable Py3k Warnings switch")
2253 vlog("# Updating hg command to enable Py3k Warnings switch")
2250 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2254 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2251 lines = [line.rstrip() for line in f]
2255 lines = [line.rstrip() for line in f]
2252 lines[0] += ' -3'
2256 lines[0] += ' -3'
2253 f.close()
2257 f.close()
2254 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2258 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2255 for line in lines:
2259 for line in lines:
2256 f.write(line + '\n')
2260 f.write(line + '\n')
2257 f.close()
2261 f.close()
2258
2262
2259 hgbat = os.path.join(self._bindir, b'hg.bat')
2263 hgbat = os.path.join(self._bindir, b'hg.bat')
2260 if os.path.isfile(hgbat):
2264 if os.path.isfile(hgbat):
2261 # hg.bat expects to be put in bin/scripts while run-tests.py
2265 # hg.bat expects to be put in bin/scripts while run-tests.py
2262 # installation layout put it in bin/ directly. Fix it
2266 # installation layout put it in bin/ directly. Fix it
2263 f = open(hgbat, 'rb')
2267 f = open(hgbat, 'rb')
2264 data = f.read()
2268 data = f.read()
2265 f.close()
2269 f.close()
2266 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2270 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2267 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2271 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2268 b'"%~dp0python" "%~dp0hg" %*')
2272 b'"%~dp0python" "%~dp0hg" %*')
2269 f = open(hgbat, 'wb')
2273 f = open(hgbat, 'wb')
2270 f.write(data)
2274 f.write(data)
2271 f.close()
2275 f.close()
2272 else:
2276 else:
2273 print('WARNING: cannot fix hg.bat reference to python.exe')
2277 print('WARNING: cannot fix hg.bat reference to python.exe')
2274
2278
2275 if self.options.anycoverage:
2279 if self.options.anycoverage:
2276 custom = os.path.join(self._testdir, 'sitecustomize.py')
2280 custom = os.path.join(self._testdir, 'sitecustomize.py')
2277 target = os.path.join(self._pythondir, 'sitecustomize.py')
2281 target = os.path.join(self._pythondir, 'sitecustomize.py')
2278 vlog('# Installing coverage trigger to %s' % target)
2282 vlog('# Installing coverage trigger to %s' % target)
2279 shutil.copyfile(custom, target)
2283 shutil.copyfile(custom, target)
2280 rc = os.path.join(self._testdir, '.coveragerc')
2284 rc = os.path.join(self._testdir, '.coveragerc')
2281 vlog('# Installing coverage rc to %s' % rc)
2285 vlog('# Installing coverage rc to %s' % rc)
2282 os.environ['COVERAGE_PROCESS_START'] = rc
2286 os.environ['COVERAGE_PROCESS_START'] = rc
2283 covdir = os.path.join(self._installdir, '..', 'coverage')
2287 covdir = os.path.join(self._installdir, '..', 'coverage')
2284 try:
2288 try:
2285 os.mkdir(covdir)
2289 os.mkdir(covdir)
2286 except OSError as e:
2290 except OSError as e:
2287 if e.errno != errno.EEXIST:
2291 if e.errno != errno.EEXIST:
2288 raise
2292 raise
2289
2293
2290 os.environ['COVERAGE_DIR'] = covdir
2294 os.environ['COVERAGE_DIR'] = covdir
2291
2295
2292 def _checkhglib(self, verb):
2296 def _checkhglib(self, verb):
2293 """Ensure that the 'mercurial' package imported by python is
2297 """Ensure that the 'mercurial' package imported by python is
2294 the one we expect it to be. If not, print a warning to stderr."""
2298 the one we expect it to be. If not, print a warning to stderr."""
2295 if ((self._bindir == self._pythondir) and
2299 if ((self._bindir == self._pythondir) and
2296 (self._bindir != self._tmpbindir)):
2300 (self._bindir != self._tmpbindir)):
2297 # The pythondir has been inferred from --with-hg flag.
2301 # The pythondir has been inferred from --with-hg flag.
2298 # We cannot expect anything sensible here.
2302 # We cannot expect anything sensible here.
2299 return
2303 return
2300 expecthg = os.path.join(self._pythondir, b'mercurial')
2304 expecthg = os.path.join(self._pythondir, b'mercurial')
2301 actualhg = self._gethgpath()
2305 actualhg = self._gethgpath()
2302 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2306 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2303 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2307 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2304 ' (expected %s)\n'
2308 ' (expected %s)\n'
2305 % (verb, actualhg, expecthg))
2309 % (verb, actualhg, expecthg))
2306 def _gethgpath(self):
2310 def _gethgpath(self):
2307 """Return the path to the mercurial package that is actually found by
2311 """Return the path to the mercurial package that is actually found by
2308 the current Python interpreter."""
2312 the current Python interpreter."""
2309 if self._hgpath is not None:
2313 if self._hgpath is not None:
2310 return self._hgpath
2314 return self._hgpath
2311
2315
2312 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2316 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2313 cmd = cmd % PYTHON
2317 cmd = cmd % PYTHON
2314 if PYTHON3:
2318 if PYTHON3:
2315 cmd = _strpath(cmd)
2319 cmd = _strpath(cmd)
2316 pipe = os.popen(cmd)
2320 pipe = os.popen(cmd)
2317 try:
2321 try:
2318 self._hgpath = _bytespath(pipe.read().strip())
2322 self._hgpath = _bytespath(pipe.read().strip())
2319 finally:
2323 finally:
2320 pipe.close()
2324 pipe.close()
2321
2325
2322 return self._hgpath
2326 return self._hgpath
2323
2327
2324 def _outputcoverage(self):
2328 def _outputcoverage(self):
2325 """Produce code coverage output."""
2329 """Produce code coverage output."""
2326 from coverage import coverage
2330 from coverage import coverage
2327
2331
2328 vlog('# Producing coverage report')
2332 vlog('# Producing coverage report')
2329 # chdir is the easiest way to get short, relative paths in the
2333 # chdir is the easiest way to get short, relative paths in the
2330 # output.
2334 # output.
2331 os.chdir(self._hgroot)
2335 os.chdir(self._hgroot)
2332 covdir = os.path.join(self._installdir, '..', 'coverage')
2336 covdir = os.path.join(self._installdir, '..', 'coverage')
2333 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2337 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2334
2338
2335 # Map install directory paths back to source directory.
2339 # Map install directory paths back to source directory.
2336 cov.config.paths['srcdir'] = ['.', self._pythondir]
2340 cov.config.paths['srcdir'] = ['.', self._pythondir]
2337
2341
2338 cov.combine()
2342 cov.combine()
2339
2343
2340 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2344 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2341 cov.report(ignore_errors=True, omit=omit)
2345 cov.report(ignore_errors=True, omit=omit)
2342
2346
2343 if self.options.htmlcov:
2347 if self.options.htmlcov:
2344 htmldir = os.path.join(self._testdir, 'htmlcov')
2348 htmldir = os.path.join(self._testdir, 'htmlcov')
2345 cov.html_report(directory=htmldir, omit=omit)
2349 cov.html_report(directory=htmldir, omit=omit)
2346 if self.options.annotate:
2350 if self.options.annotate:
2347 adir = os.path.join(self._testdir, 'annotated')
2351 adir = os.path.join(self._testdir, 'annotated')
2348 if not os.path.isdir(adir):
2352 if not os.path.isdir(adir):
2349 os.mkdir(adir)
2353 os.mkdir(adir)
2350 cov.annotate(directory=adir, omit=omit)
2354 cov.annotate(directory=adir, omit=omit)
2351
2355
2352 def _findprogram(self, program):
2356 def _findprogram(self, program):
2353 """Search PATH for a executable program"""
2357 """Search PATH for a executable program"""
2354 dpb = _bytespath(os.defpath)
2358 dpb = _bytespath(os.defpath)
2355 sepb = _bytespath(os.pathsep)
2359 sepb = _bytespath(os.pathsep)
2356 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2360 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2357 name = os.path.join(p, program)
2361 name = os.path.join(p, program)
2358 if os.name == 'nt' or os.access(name, os.X_OK):
2362 if os.name == 'nt' or os.access(name, os.X_OK):
2359 return name
2363 return name
2360 return None
2364 return None
2361
2365
2362 def _checktools(self):
2366 def _checktools(self):
2363 """Ensure tools required to run tests are present."""
2367 """Ensure tools required to run tests are present."""
2364 for p in self.REQUIREDTOOLS:
2368 for p in self.REQUIREDTOOLS:
2365 if os.name == 'nt' and not p.endswith('.exe'):
2369 if os.name == 'nt' and not p.endswith('.exe'):
2366 p += '.exe'
2370 p += '.exe'
2367 found = self._findprogram(p)
2371 found = self._findprogram(p)
2368 if found:
2372 if found:
2369 vlog("# Found prerequisite", p, "at", found)
2373 vlog("# Found prerequisite", p, "at", found)
2370 else:
2374 else:
2371 print("WARNING: Did not find prerequisite tool: %s " % p)
2375 print("WARNING: Did not find prerequisite tool: %s " % p)
2372
2376
2373 if __name__ == '__main__':
2377 if __name__ == '__main__':
2374 runner = TestRunner()
2378 runner = TestRunner()
2375
2379
2376 try:
2380 try:
2377 import msvcrt
2381 import msvcrt
2378 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
2382 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
2379 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
2383 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
2380 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
2384 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
2381 except ImportError:
2385 except ImportError:
2382 pass
2386 pass
2383
2387
2384 sys.exit(runner.run(sys.argv[1:]))
2388 sys.exit(runner.run(sys.argv[1:]))
@@ -1,568 +1,628 b''
1 $ cat << EOF >> $HGRCPATH
1 $ cat << EOF >> $HGRCPATH
2 > [format]
2 > [format]
3 > usegeneraldelta=yes
3 > usegeneraldelta=yes
4 > [ui]
4 > [ui]
5 > ssh=python "$TESTDIR/dummyssh"
5 > ssh=python "$TESTDIR/dummyssh"
6 > EOF
6 > EOF
7
7
8 Set up repo
8 Set up repo
9
9
10 $ hg --config experimental.treemanifest=True init repo
10 $ hg --config experimental.treemanifest=True init repo
11 $ cd repo
11 $ cd repo
12
12
13 Requirements get set on init
13 Requirements get set on init
14
14
15 $ grep treemanifest .hg/requires
15 $ grep treemanifest .hg/requires
16 treemanifest
16 treemanifest
17
17
18 Without directories, looks like any other repo
18 Without directories, looks like any other repo
19
19
20 $ echo 0 > a
20 $ echo 0 > a
21 $ echo 0 > b
21 $ echo 0 > b
22 $ hg ci -Aqm initial
22 $ hg ci -Aqm initial
23 $ hg debugdata -m 0
23 $ hg debugdata -m 0
24 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
24 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
25 b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
25 b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
26
26
27 Submanifest is stored in separate revlog
27 Submanifest is stored in separate revlog
28
28
29 $ mkdir dir1
29 $ mkdir dir1
30 $ echo 1 > dir1/a
30 $ echo 1 > dir1/a
31 $ echo 1 > dir1/b
31 $ echo 1 > dir1/b
32 $ echo 1 > e
32 $ echo 1 > e
33 $ hg ci -Aqm 'add dir1'
33 $ hg ci -Aqm 'add dir1'
34 $ hg debugdata -m 1
34 $ hg debugdata -m 1
35 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
35 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
36 b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
36 b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc)
37 dir1\x008b3ffd73f901e83304c83d33132c8e774ceac44et (esc)
37 dir1\x008b3ffd73f901e83304c83d33132c8e774ceac44et (esc)
38 e\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
38 e\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
39 $ hg debugdata --dir dir1 0
39 $ hg debugdata --dir dir1 0
40 a\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
40 a\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
41 b\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
41 b\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc)
42
42
43 Can add nested directories
43 Can add nested directories
44
44
45 $ mkdir dir1/dir1
45 $ mkdir dir1/dir1
46 $ echo 2 > dir1/dir1/a
46 $ echo 2 > dir1/dir1/a
47 $ echo 2 > dir1/dir1/b
47 $ echo 2 > dir1/dir1/b
48 $ mkdir dir1/dir2
48 $ mkdir dir1/dir2
49 $ echo 2 > dir1/dir2/a
49 $ echo 2 > dir1/dir2/a
50 $ echo 2 > dir1/dir2/b
50 $ echo 2 > dir1/dir2/b
51 $ hg ci -Aqm 'add dir1/dir1'
51 $ hg ci -Aqm 'add dir1/dir1'
52 $ hg files -r .
52 $ hg files -r .
53 a
53 a
54 b
54 b
55 dir1/a (glob)
55 dir1/a (glob)
56 dir1/b (glob)
56 dir1/b (glob)
57 dir1/dir1/a (glob)
57 dir1/dir1/a (glob)
58 dir1/dir1/b (glob)
58 dir1/dir1/b (glob)
59 dir1/dir2/a (glob)
59 dir1/dir2/a (glob)
60 dir1/dir2/b (glob)
60 dir1/dir2/b (glob)
61 e
61 e
62
62
63 Revision is not created for unchanged directory
63 Revision is not created for unchanged directory
64
64
65 $ mkdir dir2
65 $ mkdir dir2
66 $ echo 3 > dir2/a
66 $ echo 3 > dir2/a
67 $ hg add dir2
67 $ hg add dir2
68 adding dir2/a (glob)
68 adding dir2/a (glob)
69 $ hg debugindex --dir dir1 > before
69 $ hg debugindex --dir dir1 > before
70 $ hg ci -qm 'add dir2'
70 $ hg ci -qm 'add dir2'
71 $ hg debugindex --dir dir1 > after
71 $ hg debugindex --dir dir1 > after
72 $ diff before after
72 $ diff before after
73 $ rm before after
73 $ rm before after
74
74
75 Removing directory does not create an revlog entry
75 Removing directory does not create an revlog entry
76
76
77 $ hg rm dir1/dir1
77 $ hg rm dir1/dir1
78 removing dir1/dir1/a (glob)
78 removing dir1/dir1/a (glob)
79 removing dir1/dir1/b (glob)
79 removing dir1/dir1/b (glob)
80 $ hg debugindex --dir dir1/dir1 > before
80 $ hg debugindex --dir dir1/dir1 > before
81 $ hg ci -qm 'remove dir1/dir1'
81 $ hg ci -qm 'remove dir1/dir1'
82 $ hg debugindex --dir dir1/dir1 > after
82 $ hg debugindex --dir dir1/dir1 > after
83 $ diff before after
83 $ diff before after
84 $ rm before after
84 $ rm before after
85
85
86 Check that hg files (calls treemanifest.walk()) works
86 Check that hg files (calls treemanifest.walk()) works
87 without loading all directory revlogs
87 without loading all directory revlogs
88
88
89 $ hg co 'desc("add dir2")'
89 $ hg co 'desc("add dir2")'
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
90 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 $ mv .hg/store/meta/dir2 .hg/store/meta/dir2-backup
91 $ mv .hg/store/meta/dir2 .hg/store/meta/dir2-backup
92 $ hg files -r . dir1
92 $ hg files -r . dir1
93 dir1/a (glob)
93 dir1/a (glob)
94 dir1/b (glob)
94 dir1/b (glob)
95 dir1/dir1/a (glob)
95 dir1/dir1/a (glob)
96 dir1/dir1/b (glob)
96 dir1/dir1/b (glob)
97 dir1/dir2/a (glob)
97 dir1/dir2/a (glob)
98 dir1/dir2/b (glob)
98 dir1/dir2/b (glob)
99
99
100 Check that status between revisions works (calls treemanifest.matches())
100 Check that status between revisions works (calls treemanifest.matches())
101 without loading all directory revlogs
101 without loading all directory revlogs
102
102
103 $ hg status --rev 'desc("add dir1")' --rev . dir1
103 $ hg status --rev 'desc("add dir1")' --rev . dir1
104 A dir1/dir1/a
104 A dir1/dir1/a
105 A dir1/dir1/b
105 A dir1/dir1/b
106 A dir1/dir2/a
106 A dir1/dir2/a
107 A dir1/dir2/b
107 A dir1/dir2/b
108 $ mv .hg/store/meta/dir2-backup .hg/store/meta/dir2
108 $ mv .hg/store/meta/dir2-backup .hg/store/meta/dir2
109
109
110 Merge creates 2-parent revision of directory revlog
110 Merge creates 2-parent revision of directory revlog
111
111
112 $ echo 5 > dir1/a
112 $ echo 5 > dir1/a
113 $ hg ci -Aqm 'modify dir1/a'
113 $ hg ci -Aqm 'modify dir1/a'
114 $ hg co '.^'
114 $ hg co '.^'
115 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
115 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
116 $ echo 6 > dir1/b
116 $ echo 6 > dir1/b
117 $ hg ci -Aqm 'modify dir1/b'
117 $ hg ci -Aqm 'modify dir1/b'
118 $ hg merge 'desc("modify dir1/a")'
118 $ hg merge 'desc("modify dir1/a")'
119 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
119 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
120 (branch merge, don't forget to commit)
120 (branch merge, don't forget to commit)
121 $ hg ci -m 'conflict-free merge involving dir1/'
121 $ hg ci -m 'conflict-free merge involving dir1/'
122 $ cat dir1/a
122 $ cat dir1/a
123 5
123 5
124 $ cat dir1/b
124 $ cat dir1/b
125 6
125 6
126 $ hg debugindex --dir dir1
126 $ hg debugindex --dir dir1
127 rev offset length delta linkrev nodeid p1 p2
127 rev offset length delta linkrev nodeid p1 p2
128 0 0 54 -1 1 8b3ffd73f901 000000000000 000000000000
128 0 0 54 -1 1 8b3ffd73f901 000000000000 000000000000
129 1 54 68 0 2 68e9d057c5a8 8b3ffd73f901 000000000000
129 1 54 68 0 2 68e9d057c5a8 8b3ffd73f901 000000000000
130 2 122 12 1 4 4698198d2624 68e9d057c5a8 000000000000
130 2 122 12 1 4 4698198d2624 68e9d057c5a8 000000000000
131 3 134 55 1 5 44844058ccce 68e9d057c5a8 000000000000
131 3 134 55 1 5 44844058ccce 68e9d057c5a8 000000000000
132 4 189 55 1 6 bf3d9b744927 68e9d057c5a8 000000000000
132 4 189 55 1 6 bf3d9b744927 68e9d057c5a8 000000000000
133 5 244 55 4 7 dde7c0af2a03 bf3d9b744927 44844058ccce
133 5 244 55 4 7 dde7c0af2a03 bf3d9b744927 44844058ccce
134
134
135 Merge keeping directory from parent 1 does not create revlog entry. (Note that
135 Merge keeping directory from parent 1 does not create revlog entry. (Note that
136 dir1's manifest does change, but only because dir1/a's filelog changes.)
136 dir1's manifest does change, but only because dir1/a's filelog changes.)
137
137
138 $ hg co 'desc("add dir2")'
138 $ hg co 'desc("add dir2")'
139 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
139 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
140 $ echo 8 > dir2/a
140 $ echo 8 > dir2/a
141 $ hg ci -m 'modify dir2/a'
141 $ hg ci -m 'modify dir2/a'
142 created new head
142 created new head
143
143
144 $ hg debugindex --dir dir2 > before
144 $ hg debugindex --dir dir2 > before
145 $ hg merge 'desc("modify dir1/a")'
145 $ hg merge 'desc("modify dir1/a")'
146 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
146 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
147 (branch merge, don't forget to commit)
147 (branch merge, don't forget to commit)
148 $ hg revert -r 'desc("modify dir2/a")' .
148 $ hg revert -r 'desc("modify dir2/a")' .
149 reverting dir1/a (glob)
149 reverting dir1/a (glob)
150 $ hg ci -m 'merge, keeping parent 1'
150 $ hg ci -m 'merge, keeping parent 1'
151 $ hg debugindex --dir dir2 > after
151 $ hg debugindex --dir dir2 > after
152 $ diff before after
152 $ diff before after
153 $ rm before after
153 $ rm before after
154
154
155 Merge keeping directory from parent 2 does not create revlog entry. (Note that
155 Merge keeping directory from parent 2 does not create revlog entry. (Note that
156 dir2's manifest does change, but only because dir2/a's filelog changes.)
156 dir2's manifest does change, but only because dir2/a's filelog changes.)
157
157
158 $ hg co 'desc("modify dir2/a")'
158 $ hg co 'desc("modify dir2/a")'
159 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
159 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 $ hg debugindex --dir dir1 > before
160 $ hg debugindex --dir dir1 > before
161 $ hg merge 'desc("modify dir1/a")'
161 $ hg merge 'desc("modify dir1/a")'
162 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
162 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
163 (branch merge, don't forget to commit)
163 (branch merge, don't forget to commit)
164 $ hg revert -r 'desc("modify dir1/a")' .
164 $ hg revert -r 'desc("modify dir1/a")' .
165 reverting dir2/a (glob)
165 reverting dir2/a (glob)
166 $ hg ci -m 'merge, keeping parent 2'
166 $ hg ci -m 'merge, keeping parent 2'
167 created new head
167 created new head
168 $ hg debugindex --dir dir1 > after
168 $ hg debugindex --dir dir1 > after
169 $ diff before after
169 $ diff before after
170 $ rm before after
170 $ rm before after
171
171
172 Create flat source repo for tests with mixed flat/tree manifests
172 Create flat source repo for tests with mixed flat/tree manifests
173
173
174 $ cd ..
174 $ cd ..
175 $ hg init repo-flat
175 $ hg init repo-flat
176 $ cd repo-flat
176 $ cd repo-flat
177
177
178 Create a few commits with flat manifest
178 Create a few commits with flat manifest
179
179
180 $ echo 0 > a
180 $ echo 0 > a
181 $ echo 0 > b
181 $ echo 0 > b
182 $ echo 0 > e
182 $ echo 0 > e
183 $ for d in dir1 dir1/dir1 dir1/dir2 dir2
183 $ for d in dir1 dir1/dir1 dir1/dir2 dir2
184 > do
184 > do
185 > mkdir $d
185 > mkdir $d
186 > echo 0 > $d/a
186 > echo 0 > $d/a
187 > echo 0 > $d/b
187 > echo 0 > $d/b
188 > done
188 > done
189 $ hg ci -Aqm initial
189 $ hg ci -Aqm initial
190
190
191 $ echo 1 > a
191 $ echo 1 > a
192 $ echo 1 > dir1/a
192 $ echo 1 > dir1/a
193 $ echo 1 > dir1/dir1/a
193 $ echo 1 > dir1/dir1/a
194 $ hg ci -Aqm 'modify on branch 1'
194 $ hg ci -Aqm 'modify on branch 1'
195
195
196 $ hg co 0
196 $ hg co 0
197 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
197 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
198 $ echo 2 > b
198 $ echo 2 > b
199 $ echo 2 > dir1/b
199 $ echo 2 > dir1/b
200 $ echo 2 > dir1/dir1/b
200 $ echo 2 > dir1/dir1/b
201 $ hg ci -Aqm 'modify on branch 2'
201 $ hg ci -Aqm 'modify on branch 2'
202
202
203 $ hg merge 1
203 $ hg merge 1
204 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
204 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
205 (branch merge, don't forget to commit)
205 (branch merge, don't forget to commit)
206 $ hg ci -m 'merge of flat manifests to new flat manifest'
206 $ hg ci -m 'merge of flat manifests to new flat manifest'
207
207
208 $ hg serve -p $HGPORT -d --pid-file=hg.pid --errorlog=errors.log
208 $ hg serve -p $HGPORT -d --pid-file=hg.pid --errorlog=errors.log
209 $ cat hg.pid >> $DAEMON_PIDS
209 $ cat hg.pid >> $DAEMON_PIDS
210
210
211 Create clone with tree manifests enabled
211 Create clone with tree manifests enabled
212
212
213 $ cd ..
213 $ cd ..
214 $ hg clone --config experimental.treemanifest=1 \
214 $ hg clone --config experimental.treemanifest=1 \
215 > http://localhost:$HGPORT repo-mixed -r 1
215 > http://localhost:$HGPORT repo-mixed -r 1
216 adding changesets
216 adding changesets
217 adding manifests
217 adding manifests
218 adding file changes
218 adding file changes
219 added 2 changesets with 14 changes to 11 files
219 added 2 changesets with 14 changes to 11 files
220 updating to branch default
220 updating to branch default
221 11 files updated, 0 files merged, 0 files removed, 0 files unresolved
221 11 files updated, 0 files merged, 0 files removed, 0 files unresolved
222 $ cd repo-mixed
222 $ cd repo-mixed
223 $ test -d .hg/store/meta
223 $ test -d .hg/store/meta
224 [1]
224 [1]
225 $ grep treemanifest .hg/requires
225 $ grep treemanifest .hg/requires
226 treemanifest
226 treemanifest
227
227
228 Should be possible to push updates from flat to tree manifest repo
228 Should be possible to push updates from flat to tree manifest repo
229
229
230 $ hg -R ../repo-flat push ssh://user@dummy/repo-mixed
230 $ hg -R ../repo-flat push ssh://user@dummy/repo-mixed
231 pushing to ssh://user@dummy/repo-mixed
231 pushing to ssh://user@dummy/repo-mixed
232 searching for changes
232 searching for changes
233 remote: adding changesets
233 remote: adding changesets
234 remote: adding manifests
234 remote: adding manifests
235 remote: adding file changes
235 remote: adding file changes
236 remote: added 2 changesets with 3 changes to 3 files
236 remote: added 2 changesets with 3 changes to 3 files
237
237
238 Commit should store revlog per directory
238 Commit should store revlog per directory
239
239
240 $ hg co 1
240 $ hg co 1
241 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
241 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
242 $ echo 3 > a
242 $ echo 3 > a
243 $ echo 3 > dir1/a
243 $ echo 3 > dir1/a
244 $ echo 3 > dir1/dir1/a
244 $ echo 3 > dir1/dir1/a
245 $ hg ci -m 'first tree'
245 $ hg ci -m 'first tree'
246 created new head
246 created new head
247 $ find .hg/store/meta | sort
247 $ find .hg/store/meta | sort
248 .hg/store/meta
248 .hg/store/meta
249 .hg/store/meta/dir1
249 .hg/store/meta/dir1
250 .hg/store/meta/dir1/00manifest.i
250 .hg/store/meta/dir1/00manifest.i
251 .hg/store/meta/dir1/dir1
251 .hg/store/meta/dir1/dir1
252 .hg/store/meta/dir1/dir1/00manifest.i
252 .hg/store/meta/dir1/dir1/00manifest.i
253 .hg/store/meta/dir1/dir2
253 .hg/store/meta/dir1/dir2
254 .hg/store/meta/dir1/dir2/00manifest.i
254 .hg/store/meta/dir1/dir2/00manifest.i
255 .hg/store/meta/dir2
255 .hg/store/meta/dir2
256 .hg/store/meta/dir2/00manifest.i
256 .hg/store/meta/dir2/00manifest.i
257
257
258 Merge of two trees
258 Merge of two trees
259
259
260 $ hg co 2
260 $ hg co 2
261 6 files updated, 0 files merged, 0 files removed, 0 files unresolved
261 6 files updated, 0 files merged, 0 files removed, 0 files unresolved
262 $ hg merge 1
262 $ hg merge 1
263 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
263 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
264 (branch merge, don't forget to commit)
264 (branch merge, don't forget to commit)
265 $ hg ci -m 'merge of flat manifests to new tree manifest'
265 $ hg ci -m 'merge of flat manifests to new tree manifest'
266 created new head
266 created new head
267 $ hg diff -r 3
267 $ hg diff -r 3
268
268
269 Parent of tree root manifest should be flat manifest, and two for merge
269 Parent of tree root manifest should be flat manifest, and two for merge
270
270
271 $ hg debugindex -m
271 $ hg debugindex -m
272 rev offset length delta linkrev nodeid p1 p2
272 rev offset length delta linkrev nodeid p1 p2
273 0 0 80 -1 0 40536115ed9e 000000000000 000000000000
273 0 0 80 -1 0 40536115ed9e 000000000000 000000000000
274 1 80 83 0 1 f3376063c255 40536115ed9e 000000000000
274 1 80 83 0 1 f3376063c255 40536115ed9e 000000000000
275 2 163 89 0 2 5d9b9da231a2 40536115ed9e 000000000000
275 2 163 89 0 2 5d9b9da231a2 40536115ed9e 000000000000
276 3 252 83 2 3 d17d663cbd8a 5d9b9da231a2 f3376063c255
276 3 252 83 2 3 d17d663cbd8a 5d9b9da231a2 f3376063c255
277 4 335 124 1 4 51e32a8c60ee f3376063c255 000000000000
277 4 335 124 1 4 51e32a8c60ee f3376063c255 000000000000
278 5 459 126 2 5 cc5baa78b230 5d9b9da231a2 f3376063c255
278 5 459 126 2 5 cc5baa78b230 5d9b9da231a2 f3376063c255
279
279
280
280
281 Status across flat/tree boundary should work
281 Status across flat/tree boundary should work
282
282
283 $ hg status --rev '.^' --rev .
283 $ hg status --rev '.^' --rev .
284 M a
284 M a
285 M dir1/a
285 M dir1/a
286 M dir1/dir1/a
286 M dir1/dir1/a
287
287
288
288
289 Turning off treemanifest config has no effect
289 Turning off treemanifest config has no effect
290
290
291 $ hg debugindex --dir dir1
291 $ hg debugindex --dir dir1
292 rev offset length delta linkrev nodeid p1 p2
292 rev offset length delta linkrev nodeid p1 p2
293 0 0 127 -1 4 064927a0648a 000000000000 000000000000
293 0 0 127 -1 4 064927a0648a 000000000000 000000000000
294 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000
294 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000
295 $ echo 2 > dir1/a
295 $ echo 2 > dir1/a
296 $ hg --config experimental.treemanifest=False ci -qm 'modify dir1/a'
296 $ hg --config experimental.treemanifest=False ci -qm 'modify dir1/a'
297 $ hg debugindex --dir dir1
297 $ hg debugindex --dir dir1
298 rev offset length delta linkrev nodeid p1 p2
298 rev offset length delta linkrev nodeid p1 p2
299 0 0 127 -1 4 064927a0648a 000000000000 000000000000
299 0 0 127 -1 4 064927a0648a 000000000000 000000000000
300 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000
300 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000
301 2 238 55 1 6 5b16163a30c6 25ecb8cb8618 000000000000
301 2 238 55 1 6 5b16163a30c6 25ecb8cb8618 000000000000
302
302
303 Stripping and recovering changes should work
303 Stripping and recovering changes should work
304
304
305 $ hg st --change tip
305 $ hg st --change tip
306 M dir1/a
306 M dir1/a
307 $ hg --config extensions.strip= strip tip
307 $ hg --config extensions.strip= strip tip
308 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
308 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
309 saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg (glob)
309 saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg (glob)
310 $ hg unbundle -q .hg/strip-backup/*
310 $ hg unbundle -q .hg/strip-backup/*
311 $ hg st --change tip
311 $ hg st --change tip
312 M dir1/a
312 M dir1/a
313
313
314 Shelving and unshelving should work
314 Shelving and unshelving should work
315
315
316 $ echo foo >> dir1/a
316 $ echo foo >> dir1/a
317 $ hg --config extensions.shelve= shelve
317 $ hg --config extensions.shelve= shelve
318 shelved as default
318 shelved as default
319 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
319 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
320 $ hg --config extensions.shelve= unshelve
320 $ hg --config extensions.shelve= unshelve
321 unshelving change 'default'
321 unshelving change 'default'
322 $ hg diff --nodates
322 $ hg diff --nodates
323 diff -r 708a273da119 dir1/a
323 diff -r 708a273da119 dir1/a
324 --- a/dir1/a
324 --- a/dir1/a
325 +++ b/dir1/a
325 +++ b/dir1/a
326 @@ -1,1 +1,2 @@
326 @@ -1,1 +1,2 @@
327 1
327 1
328 +foo
328 +foo
329
329
330 Pushing from treemanifest repo to an empty repo makes that a treemanifest repo
330 Pushing from treemanifest repo to an empty repo makes that a treemanifest repo
331
331
332 $ cd ..
332 $ cd ..
333 $ hg init empty-repo
333 $ hg init empty-repo
334 $ cat << EOF >> empty-repo/.hg/hgrc
334 $ cat << EOF >> empty-repo/.hg/hgrc
335 > [experimental]
335 > [experimental]
336 > changegroup3=yes
336 > changegroup3=yes
337 > EOF
337 > EOF
338 $ grep treemanifest empty-repo/.hg/requires
338 $ grep treemanifest empty-repo/.hg/requires
339 [1]
339 [1]
340 $ hg push -R repo -r 0 empty-repo
340 $ hg push -R repo -r 0 empty-repo
341 pushing to empty-repo
341 pushing to empty-repo
342 searching for changes
342 searching for changes
343 adding changesets
343 adding changesets
344 adding manifests
344 adding manifests
345 adding file changes
345 adding file changes
346 added 1 changesets with 2 changes to 2 files
346 added 1 changesets with 2 changes to 2 files
347 $ grep treemanifest empty-repo/.hg/requires
347 $ grep treemanifest empty-repo/.hg/requires
348 treemanifest
348 treemanifest
349
349
350 Pushing to an empty repo works
350 Pushing to an empty repo works
351
351
352 $ hg --config experimental.treemanifest=1 init clone
352 $ hg --config experimental.treemanifest=1 init clone
353 $ grep treemanifest clone/.hg/requires
353 $ grep treemanifest clone/.hg/requires
354 treemanifest
354 treemanifest
355 $ hg push -R repo clone
355 $ hg push -R repo clone
356 pushing to clone
356 pushing to clone
357 searching for changes
357 searching for changes
358 adding changesets
358 adding changesets
359 adding manifests
359 adding manifests
360 adding file changes
360 adding file changes
361 added 11 changesets with 15 changes to 10 files (+3 heads)
361 added 11 changesets with 15 changes to 10 files (+3 heads)
362 $ grep treemanifest clone/.hg/requires
362 $ grep treemanifest clone/.hg/requires
363 treemanifest
363 treemanifest
364
364
365 Create deeper repo with tree manifests.
365 Create deeper repo with tree manifests.
366
366
367 $ hg --config experimental.treemanifest=True init deeprepo
367 $ hg --config experimental.treemanifest=True init deeprepo
368 $ cd deeprepo
368 $ cd deeprepo
369
369
370 $ mkdir a
370 $ mkdir .A
371 $ mkdir b
371 $ mkdir b
372 $ mkdir b/bar
372 $ mkdir b/bar
373 $ mkdir b/bar/orange
373 $ mkdir b/bar/orange
374 $ mkdir b/bar/orange/fly
374 $ mkdir b/bar/orange/fly
375 $ mkdir b/foo
375 $ mkdir b/foo
376 $ mkdir b/foo/apple
376 $ mkdir b/foo/apple
377 $ mkdir b/foo/apple/bees
377 $ mkdir b/foo/apple/bees
378
378
379 $ touch a/one.txt
379 $ touch .A/one.txt
380 $ touch a/two.txt
380 $ touch .A/two.txt
381 $ touch b/bar/fruits.txt
381 $ touch b/bar/fruits.txt
382 $ touch b/bar/orange/fly/gnat.py
382 $ touch b/bar/orange/fly/gnat.py
383 $ touch b/bar/orange/fly/housefly.txt
383 $ touch b/bar/orange/fly/housefly.txt
384 $ touch b/foo/apple/bees/flower.py
384 $ touch b/foo/apple/bees/flower.py
385 $ touch c.txt
385 $ touch c.txt
386 $ touch d.py
386 $ touch d.py
387
387
388 $ hg ci -Aqm 'initial'
388 $ hg ci -Aqm 'initial'
389
389
390 We'll see that visitdir works by removing some treemanifest revlogs and running
390 We'll see that visitdir works by removing some treemanifest revlogs and running
391 the files command with various parameters.
391 the files command with various parameters.
392
392
393 Test files from the root.
393 Test files from the root.
394
394
395 $ hg files -r .
395 $ hg files -r .
396 a/one.txt (glob)
396 .A/one.txt (glob)
397 a/two.txt (glob)
397 .A/two.txt (glob)
398 b/bar/fruits.txt (glob)
398 b/bar/fruits.txt (glob)
399 b/bar/orange/fly/gnat.py (glob)
399 b/bar/orange/fly/gnat.py (glob)
400 b/bar/orange/fly/housefly.txt (glob)
400 b/bar/orange/fly/housefly.txt (glob)
401 b/foo/apple/bees/flower.py (glob)
401 b/foo/apple/bees/flower.py (glob)
402 c.txt
402 c.txt
403 d.py
403 d.py
404
404
405 Excludes with a glob should not exclude everything from the glob's root
405 Excludes with a glob should not exclude everything from the glob's root
406
406
407 $ hg files -r . -X 'b/fo?' b
407 $ hg files -r . -X 'b/fo?' b
408 b/bar/fruits.txt (glob)
408 b/bar/fruits.txt (glob)
409 b/bar/orange/fly/gnat.py (glob)
409 b/bar/orange/fly/gnat.py (glob)
410 b/bar/orange/fly/housefly.txt (glob)
410 b/bar/orange/fly/housefly.txt (glob)
411 $ cp -r .hg/store .hg/store-copy
411 $ cp -r .hg/store .hg/store-copy
412
412
413 Test files for a subdirectory.
413 Test files for a subdirectory.
414
414
415 $ rm -r .hg/store/meta/a
415 $ rm -r .hg/store/meta/~2e_a
416 $ hg files -r . b
416 $ hg files -r . b
417 b/bar/fruits.txt (glob)
417 b/bar/fruits.txt (glob)
418 b/bar/orange/fly/gnat.py (glob)
418 b/bar/orange/fly/gnat.py (glob)
419 b/bar/orange/fly/housefly.txt (glob)
419 b/bar/orange/fly/housefly.txt (glob)
420 b/foo/apple/bees/flower.py (glob)
420 b/foo/apple/bees/flower.py (glob)
421 $ cp -rT .hg/store-copy .hg/store
421 $ cp -rT .hg/store-copy .hg/store
422
422
423 Test files with just includes and excludes.
423 Test files with just includes and excludes.
424
424
425 $ rm -r .hg/store/meta/a
425 $ rm -r .hg/store/meta/~2e_a
426 $ rm -r .hg/store/meta/b/bar/orange/fly
426 $ rm -r .hg/store/meta/b/bar/orange/fly
427 $ rm -r .hg/store/meta/b/foo/apple/bees
427 $ rm -r .hg/store/meta/b/foo/apple/bees
428 $ hg files -r . -I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees
428 $ hg files -r . -I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees
429 b/bar/fruits.txt (glob)
429 b/bar/fruits.txt (glob)
430 $ cp -rT .hg/store-copy .hg/store
430 $ cp -rT .hg/store-copy .hg/store
431
431
432 Test files for a subdirectory, excluding a directory within it.
432 Test files for a subdirectory, excluding a directory within it.
433
433
434 $ rm -r .hg/store/meta/a
434 $ rm -r .hg/store/meta/~2e_a
435 $ rm -r .hg/store/meta/b/foo
435 $ rm -r .hg/store/meta/b/foo
436 $ hg files -r . -X path:b/foo b
436 $ hg files -r . -X path:b/foo b
437 b/bar/fruits.txt (glob)
437 b/bar/fruits.txt (glob)
438 b/bar/orange/fly/gnat.py (glob)
438 b/bar/orange/fly/gnat.py (glob)
439 b/bar/orange/fly/housefly.txt (glob)
439 b/bar/orange/fly/housefly.txt (glob)
440 $ cp -rT .hg/store-copy .hg/store
440 $ cp -rT .hg/store-copy .hg/store
441
441
442 Test files for a sub directory, including only a directory within it, and
442 Test files for a sub directory, including only a directory within it, and
443 including an unrelated directory.
443 including an unrelated directory.
444
444
445 $ rm -r .hg/store/meta/a
445 $ rm -r .hg/store/meta/~2e_a
446 $ rm -r .hg/store/meta/b/foo
446 $ rm -r .hg/store/meta/b/foo
447 $ hg files -r . -I path:b/bar/orange -I path:a b
447 $ hg files -r . -I path:b/bar/orange -I path:a b
448 b/bar/orange/fly/gnat.py (glob)
448 b/bar/orange/fly/gnat.py (glob)
449 b/bar/orange/fly/housefly.txt (glob)
449 b/bar/orange/fly/housefly.txt (glob)
450 $ cp -rT .hg/store-copy .hg/store
450 $ cp -rT .hg/store-copy .hg/store
451
451
452 Test files for a pattern, including a directory, and excluding a directory
452 Test files for a pattern, including a directory, and excluding a directory
453 within that.
453 within that.
454
454
455 $ rm -r .hg/store/meta/a
455 $ rm -r .hg/store/meta/~2e_a
456 $ rm -r .hg/store/meta/b/foo
456 $ rm -r .hg/store/meta/b/foo
457 $ rm -r .hg/store/meta/b/bar/orange
457 $ rm -r .hg/store/meta/b/bar/orange
458 $ hg files -r . glob:**.txt -I path:b/bar -X path:b/bar/orange
458 $ hg files -r . glob:**.txt -I path:b/bar -X path:b/bar/orange
459 b/bar/fruits.txt (glob)
459 b/bar/fruits.txt (glob)
460 $ cp -rT .hg/store-copy .hg/store
460 $ cp -rT .hg/store-copy .hg/store
461
461
462 Add some more changes to the deep repo
462 Add some more changes to the deep repo
463 $ echo narf >> b/bar/fruits.txt
463 $ echo narf >> b/bar/fruits.txt
464 $ hg ci -m narf
464 $ hg ci -m narf
465 $ echo troz >> b/bar/orange/fly/gnat.py
465 $ echo troz >> b/bar/orange/fly/gnat.py
466 $ hg ci -m troz
466 $ hg ci -m troz
467
467
468 Test cloning a treemanifest repo over http.
468 Test cloning a treemanifest repo over http.
469 $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --errorlog=errors.log
469 $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --errorlog=errors.log
470 $ cat hg.pid >> $DAEMON_PIDS
470 $ cat hg.pid >> $DAEMON_PIDS
471 $ cd ..
471 $ cd ..
472 We can clone even with the knob turned off and we'll get a treemanifest repo.
472 We can clone even with the knob turned off and we'll get a treemanifest repo.
473 $ hg clone --config experimental.treemanifest=False \
473 $ hg clone --config experimental.treemanifest=False \
474 > --config experimental.changegroup3=True \
474 > --config experimental.changegroup3=True \
475 > http://localhost:$HGPORT2 deepclone
475 > http://localhost:$HGPORT2 deepclone
476 requesting all changes
476 requesting all changes
477 adding changesets
477 adding changesets
478 adding manifests
478 adding manifests
479 adding file changes
479 adding file changes
480 added 3 changesets with 10 changes to 8 files
480 added 3 changesets with 10 changes to 8 files
481 updating to branch default
481 updating to branch default
482 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
482 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
483 No server errors.
483 No server errors.
484 $ cat deeprepo/errors.log
484 $ cat deeprepo/errors.log
485 requires got updated to include treemanifest
485 requires got updated to include treemanifest
486 $ cat deepclone/.hg/requires | grep treemanifest
486 $ cat deepclone/.hg/requires | grep treemanifest
487 treemanifest
487 treemanifest
488 Tree manifest revlogs exist.
488 Tree manifest revlogs exist.
489 $ find deepclone/.hg/store/meta | sort
489 $ find deepclone/.hg/store/meta | sort
490 deepclone/.hg/store/meta
490 deepclone/.hg/store/meta
491 deepclone/.hg/store/meta/a
492 deepclone/.hg/store/meta/a/00manifest.i
493 deepclone/.hg/store/meta/b
491 deepclone/.hg/store/meta/b
494 deepclone/.hg/store/meta/b/00manifest.i
492 deepclone/.hg/store/meta/b/00manifest.i
495 deepclone/.hg/store/meta/b/bar
493 deepclone/.hg/store/meta/b/bar
496 deepclone/.hg/store/meta/b/bar/00manifest.i
494 deepclone/.hg/store/meta/b/bar/00manifest.i
497 deepclone/.hg/store/meta/b/bar/orange
495 deepclone/.hg/store/meta/b/bar/orange
498 deepclone/.hg/store/meta/b/bar/orange/00manifest.i
496 deepclone/.hg/store/meta/b/bar/orange/00manifest.i
499 deepclone/.hg/store/meta/b/bar/orange/fly
497 deepclone/.hg/store/meta/b/bar/orange/fly
500 deepclone/.hg/store/meta/b/bar/orange/fly/00manifest.i
498 deepclone/.hg/store/meta/b/bar/orange/fly/00manifest.i
501 deepclone/.hg/store/meta/b/foo
499 deepclone/.hg/store/meta/b/foo
502 deepclone/.hg/store/meta/b/foo/00manifest.i
500 deepclone/.hg/store/meta/b/foo/00manifest.i
503 deepclone/.hg/store/meta/b/foo/apple
501 deepclone/.hg/store/meta/b/foo/apple
504 deepclone/.hg/store/meta/b/foo/apple/00manifest.i
502 deepclone/.hg/store/meta/b/foo/apple/00manifest.i
505 deepclone/.hg/store/meta/b/foo/apple/bees
503 deepclone/.hg/store/meta/b/foo/apple/bees
506 deepclone/.hg/store/meta/b/foo/apple/bees/00manifest.i
504 deepclone/.hg/store/meta/b/foo/apple/bees/00manifest.i
505 deepclone/.hg/store/meta/~2e_a
506 deepclone/.hg/store/meta/~2e_a/00manifest.i
507 Verify passes.
507 Verify passes.
508 $ cd deepclone
508 $ cd deepclone
509 $ hg verify
509 $ hg verify
510 checking changesets
510 checking changesets
511 checking manifests
511 checking manifests
512 crosschecking files in changesets and manifests
512 crosschecking files in changesets and manifests
513 checking files
513 checking files
514 8 files, 3 changesets, 10 total revisions
514 8 files, 3 changesets, 10 total revisions
515 $ cd ..
515 $ cd ..
516
516
517 Create clones using old repo formats to use in later tests
517 Create clones using old repo formats to use in later tests
518 $ hg clone --config format.usestore=False \
518 $ hg clone --config format.usestore=False \
519 > --config experimental.changegroup3=True \
519 > --config experimental.changegroup3=True \
520 > http://localhost:$HGPORT2 deeprepo-basicstore
520 > http://localhost:$HGPORT2 deeprepo-basicstore
521 requesting all changes
521 requesting all changes
522 adding changesets
522 adding changesets
523 adding manifests
523 adding manifests
524 adding file changes
524 adding file changes
525 added 3 changesets with 10 changes to 8 files
525 added 3 changesets with 10 changes to 8 files
526 updating to branch default
526 updating to branch default
527 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
527 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
528 $ grep store deeprepo-basicstore/.hg/requires
528 $ cd deeprepo-basicstore
529 $ grep store .hg/requires
529 [1]
530 [1]
531 $ hg serve -p $HGPORT3 -d --pid-file=hg.pid --errorlog=errors.log
532 $ cat hg.pid >> $DAEMON_PIDS
533 $ cd ..
530 $ hg clone --config format.usefncache=False \
534 $ hg clone --config format.usefncache=False \
531 > --config experimental.changegroup3=True \
535 > --config experimental.changegroup3=True \
532 > http://localhost:$HGPORT2 deeprepo-encodedstore
536 > http://localhost:$HGPORT2 deeprepo-encodedstore
533 requesting all changes
537 requesting all changes
534 adding changesets
538 adding changesets
535 adding manifests
539 adding manifests
536 adding file changes
540 adding file changes
537 added 3 changesets with 10 changes to 8 files
541 added 3 changesets with 10 changes to 8 files
538 updating to branch default
542 updating to branch default
539 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
543 8 files updated, 0 files merged, 0 files removed, 0 files unresolved
540 $ grep fncache deeprepo-encodedstore/.hg/requires
544 $ cd deeprepo-encodedstore
545 $ grep fncache .hg/requires
541 [1]
546 [1]
547 $ hg serve -p $HGPORT4 -d --pid-file=hg.pid --errorlog=errors.log
548 $ cat hg.pid >> $DAEMON_PIDS
549 $ cd ..
542
550
543 Local clone with basicstore
551 Local clone with basicstore
544 $ hg clone -U deeprepo-basicstore local-clone-basicstore
552 $ hg clone -U deeprepo-basicstore local-clone-basicstore
545 $ hg -R local-clone-basicstore verify
553 $ hg -R local-clone-basicstore verify
546 checking changesets
554 checking changesets
547 checking manifests
555 checking manifests
548 crosschecking files in changesets and manifests
556 crosschecking files in changesets and manifests
549 checking files
557 checking files
550 8 files, 3 changesets, 10 total revisions
558 8 files, 3 changesets, 10 total revisions
551
559
552 Local clone with encodedstore
560 Local clone with encodedstore
553 $ hg clone -U deeprepo-encodedstore local-clone-encodedstore
561 $ hg clone -U deeprepo-encodedstore local-clone-encodedstore
554 $ hg -R local-clone-encodedstore verify
562 $ hg -R local-clone-encodedstore verify
555 checking changesets
563 checking changesets
556 checking manifests
564 checking manifests
557 crosschecking files in changesets and manifests
565 crosschecking files in changesets and manifests
558 checking files
566 checking files
559 8 files, 3 changesets, 10 total revisions
567 8 files, 3 changesets, 10 total revisions
560
568
561 Local clone with fncachestore
569 Local clone with fncachestore
562 $ hg clone -U deeprepo local-clone-fncachestore
570 $ hg clone -U deeprepo local-clone-fncachestore
563 $ hg -R local-clone-fncachestore verify
571 $ hg -R local-clone-fncachestore verify
564 checking changesets
572 checking changesets
565 checking manifests
573 checking manifests
566 crosschecking files in changesets and manifests
574 crosschecking files in changesets and manifests
567 checking files
575 checking files
568 8 files, 3 changesets, 10 total revisions
576 8 files, 3 changesets, 10 total revisions
577
578 Stream clone with basicstore
579 $ hg clone --config experimental.changegroup3=True --uncompressed -U \
580 > http://localhost:$HGPORT3 stream-clone-basicstore
581 streaming all changes
582 18 files to transfer, * of data (glob)
583 transferred * in * seconds (*) (glob)
584 searching for changes
585 no changes found
586 $ hg -R stream-clone-basicstore verify
587 checking changesets
588 checking manifests
589 crosschecking files in changesets and manifests
590 checking files
591 8 files, 3 changesets, 10 total revisions
592
593 Stream clone with encodedstore
594 $ hg clone --config experimental.changegroup3=True --uncompressed -U \
595 > http://localhost:$HGPORT4 stream-clone-encodedstore
596 streaming all changes
597 18 files to transfer, * of data (glob)
598 transferred * in * seconds (*) (glob)
599 searching for changes
600 no changes found
601 $ hg -R stream-clone-encodedstore verify
602 checking changesets
603 checking manifests
604 crosschecking files in changesets and manifests
605 checking files
606 8 files, 3 changesets, 10 total revisions
607
608 Stream clone with fncachestore
609 $ hg clone --config experimental.changegroup3=True --uncompressed -U \
610 > http://localhost:$HGPORT2 stream-clone-fncachestore
611 streaming all changes
612 18 files to transfer, * of data (glob)
613 transferred * in * seconds (*) (glob)
614 searching for changes
615 no changes found
616 $ hg -R stream-clone-fncachestore verify
617 checking changesets
618 checking manifests
619 crosschecking files in changesets and manifests
620 checking files
621 8 files, 3 changesets, 10 total revisions
622
623 Packed bundle
624 $ hg -R deeprepo debugcreatestreamclonebundle repo-packed.hg
625 writing 3349 bytes for 18 files
626 bundle requirements: generaldelta, revlogv1, treemanifest
627 $ hg debugbundle --spec repo-packed.hg
628 none-packed1;requirements%3Dgeneraldelta%2Crevlogv1%2Ctreemanifest
General Comments 0
You need to be logged in to leave comments. Login now