Show More
@@ -1,482 +1,480 b'' | |||||
1 | # repair.py - functions for repository repair for mercurial |
|
1 | # repair.py - functions for repository repair for mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2005, 2006 Chris Mason <mason@suse.com> |
|
3 | # Copyright 2005, 2006 Chris Mason <mason@suse.com> | |
4 | # Copyright 2007 Matt Mackall |
|
4 | # Copyright 2007 Matt Mackall | |
5 | # |
|
5 | # | |
6 | # This software may be used and distributed according to the terms of the |
|
6 | # This software may be used and distributed according to the terms of the | |
7 | # GNU General Public License version 2 or any later version. |
|
7 | # GNU General Public License version 2 or any later version. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | import errno |
|
11 | import errno | |
12 | import hashlib |
|
12 | import hashlib | |
13 |
|
13 | |||
14 | from .i18n import _ |
|
14 | from .i18n import _ | |
15 | from .node import ( |
|
15 | from .node import ( | |
16 | hex, |
|
16 | hex, | |
17 | short, |
|
17 | short, | |
18 | ) |
|
18 | ) | |
19 | from . import ( |
|
19 | from . import ( | |
20 | bundle2, |
|
20 | bundle2, | |
21 | changegroup, |
|
21 | changegroup, | |
22 | discovery, |
|
22 | discovery, | |
23 | error, |
|
23 | error, | |
24 | exchange, |
|
24 | exchange, | |
25 | obsolete, |
|
25 | obsolete, | |
26 | obsutil, |
|
26 | obsutil, | |
27 | phases, |
|
27 | phases, | |
28 | pycompat, |
|
28 | pycompat, | |
29 | util, |
|
29 | util, | |
30 | ) |
|
30 | ) | |
31 | from .utils import ( |
|
31 | from .utils import ( | |
32 | stringutil, |
|
32 | stringutil, | |
33 | ) |
|
33 | ) | |
34 |
|
34 | |||
def backupbundle(repo, bases, heads, node, suffix, compress=True,
                 obsolescence=True):
    """create a bundle with the specified revisions as a backup"""

    # make sure the backup directory exists before writing into it
    backupdir = "strip-backup"
    vfs = repo.vfs
    if not vfs.isdir(backupdir):
        vfs.mkdir(backupdir)

    # Include a hash of all the nodes in the filename for uniqueness
    commits = repo.set('%ln::%ln', bases, heads)
    sortedhashes = sorted(c.hex() for c in commits)
    digest = hashlib.sha1(''.join(sortedhashes)).digest()
    name = "%s/%s-%s-%s.hg" % (backupdir, short(node),
                               hex(digest[:4]), suffix)

    # choose bundle format/compression: modern changegroups need HG20,
    # legacy '01' changegroups use the HG10* container
    cgversion = changegroup.localversion(repo)
    if cgversion != '01':
        bundletype = "HG20"
        comp = 'BZ' if compress else None
    else:
        bundletype = "HG10BZ" if compress else "HG10UN"
        comp = None

    outgoing = discovery.outgoing(repo, missingroots=bases, missingheads=heads)
    contentopts = {
        'cg.version': cgversion,
        'obsolescence': obsolescence,
        'phases': True,
    }
    return bundle2.writenewbundle(repo.ui, repo, 'strip', name, bundletype,
                                  outgoing, contentopts, vfs, compression=comp)
70 |
|
70 | |||
def _collectfiles(repo, striprev):
    """find out the filelogs affected by the strip"""
    touched = set()
    # every file mentioned by any changeset at or above striprev is affected
    for rev in pycompat.xrange(striprev, len(repo)):
        touched.update(repo[rev].files())
    return sorted(touched)
79 |
|
79 | |||
80 | def _collectrevlog(revlog, striprev): |
|
80 | def _collectrevlog(revlog, striprev): | |
81 | _, brokenset = revlog.getstrippoint(striprev) |
|
81 | _, brokenset = revlog.getstrippoint(striprev) | |
82 | return [revlog.linkrev(r) for r in brokenset] |
|
82 | return [revlog.linkrev(r) for r in brokenset] | |
83 |
|
83 | |||
def _collectmanifest(repo, striprev):
    """Return linkrevs of root-manifest revisions broken by the strip."""
    rootmanifest = repo.manifestlog.getstorage(b'')
    return _collectrevlog(rootmanifest, striprev)
86 |
|
86 | |||
def _collectbrokencsets(repo, files, striprev):
    """return the changesets which will be broken by the truncation"""
    # start from manifest breakage, then fold in every affected filelog
    broken = set(_collectmanifest(repo, striprev))
    for fname in files:
        broken.update(_collectrevlog(repo.file(fname), striprev))
    return broken
96 |
|
96 | |||
def strip(ui, repo, nodelist, backup=True, topic='backup'):
    """remove the changesets in ``nodelist`` and their descendants

    Revisions numbered above the lowest stripped revision that are NOT
    descendants of it are saved into a temporary bundle and re-applied
    after truncation.  Returns the backup bundle path (or None if
    ``backup`` was false) so extensions can use it.
    """
    # This function requires the caller to lock the repo, but it operates
    # within a transaction of its own, and thus requires there to be no current
    # transaction when it is called.
    if repo.currenttransaction() is not None:
        raise error.ProgrammingError('cannot strip from inside a transaction')

    # Simple way to maintain backwards compatibility for this
    # argument.
    if backup in ['none', 'strip']:
        backup = False

    repo = repo.unfiltered()
    repo.destroying()
    vfs = repo.vfs
    # load bookmark before changelog to avoid side effect from outdated
    # changelog (see repo._refreshchangelog)
    repo._bookmarks
    cl = repo.changelog

    # TODO handle undo of merge sets
    if isinstance(nodelist, str):
        nodelist = [nodelist]
    striplist = [cl.rev(node) for node in nodelist]
    striprev = min(striplist)

    files = _collectfiles(repo, striprev)
    saverevs = _collectbrokencsets(repo, files, striprev)

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = set(striplist)
    saveheads = set(saverevs)
    for r in cl.revs(start=striprev + 1):
        if any(p in tostrip for p in cl.parentrevs(r)):
            tostrip.add(r)

        if r not in tostrip:
            saverevs.add(r)
            saveheads.difference_update(cl.parentrevs(r))
            saveheads.add(r)
    saveheads = [cl.node(r) for r in saveheads]

    # compute base nodes
    if saverevs:
        descendants = set(cl.descendants(saverevs))
        saverevs.difference_update(descendants)
    savebases = [cl.node(r) for r in saverevs]
    stripbases = [cl.node(r) for r in tostrip]

    stripobsidx = obsmarkers = ()
    if repo.ui.configbool('devel', 'strip-obsmarkers'):
        obsmarkers = obsutil.exclusivemarkers(repo, stripbases)
    if obsmarkers:
        # obsstore indexes of the markers that must be deleted
        stripobsidx = [i for i, m in enumerate(repo.obsstore)
                       if m in obsmarkers]

    newbmtarget, updatebm = _bookmarkmovements(repo, tostrip)

    backupfile = None
    node = nodelist[-1]
    if backup:
        backupfile = _createstripbackup(repo, stripbases, node, topic)
    # create a changegroup for all the branches we need to keep
    tmpbundlefile = None
    if saveheads:
        # do not compress temporary bundle if we remove it from disk later
        #
        # We do not include obsolescence, it might re-introduce prune markers
        # we are trying to strip. This is harmless since the stripped markers
        # are already backed up and we did not touched the markers for the
        # saved changesets.
        tmpbundlefile = backupbundle(repo, savebases, saveheads, node, 'temp',
                                     compress=False, obsolescence=False)

    with ui.uninterruptible():
        try:
            with repo.transaction("strip") as tr:
                # TODO this code violates the interface abstraction of the
                # transaction and makes assumptions that file storage is
                # using append-only files. We'll need some kind of storage
                # API to handle stripping for us.
                offset = len(tr._entries)

                tr.startgroup()
                cl.strip(striprev, tr)
                stripmanifest(repo, striprev, tr, files)

                for fn in files:
                    repo.file(fn).strip(striprev, tr)
                tr.endgroup()

                # truncate every file the strip group appended to back to
                # its pre-strip offset; a file truncated to 0 is gone
                for i in pycompat.xrange(offset, len(tr._entries)):
                    file, troffset, ignore = tr._entries[i]
                    with repo.svfs(file, 'a', checkambig=True) as fp:
                        fp.truncate(troffset)
                    if troffset == 0:
                        repo.store.markremoved(file)

            deleteobsmarkers(repo.obsstore, stripobsidx)
            del repo.obsstore
            repo.invalidatevolatilesets()
            repo._phasecache.filterunknown(repo)

            if tmpbundlefile:
                ui.note(_("adding branch\n"))
                f = vfs.open(tmpbundlefile, "rb")
                gen = exchange.readbundle(ui, f, tmpbundlefile, vfs)
                if not repo.ui.verbose:
                    # silence internal shuffling chatter
                    repo.ui.pushbuffer()
                tmpbundleurl = 'bundle:' + vfs.join(tmpbundlefile)
                txnname = 'strip'
                if not isinstance(gen, bundle2.unbundle20):
                    txnname = "strip\n%s" % util.hidepassword(tmpbundleurl)
                with repo.transaction(txnname) as tr:
                    bundle2.applybundle(repo, gen, tr, source='strip',
                                        url=tmpbundleurl)
                if not repo.ui.verbose:
                    repo.ui.popbuffer()
                f.close()

            with repo.transaction('repair') as tr:
                bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
                repo._bookmarks.applychanges(repo, tr, bmchanges)

            # remove undo files
            for undovfs, undofile in repo.undofiles():
                try:
                    undovfs.unlink(undofile)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        ui.warn(_('error removing %s: %s\n') %
                                (undovfs.join(undofile),
                                 stringutil.forcebytestr(e)))

        except: # re-raises
            if backupfile:
                ui.warn(_("strip failed, backup bundle stored in '%s'\n")
                        % vfs.join(backupfile))
            if tmpbundlefile:
                ui.warn(_("strip failed, unrecovered changes stored in '%s'\n")
                        % vfs.join(tmpbundlefile))
                ui.warn(_("(fix the problem, then recover the changesets with "
                          "\"hg unbundle '%s'\")\n") % vfs.join(tmpbundlefile))
            raise
        else:
            if tmpbundlefile:
                # Remove temporary bundle only if there were no exceptions
                vfs.unlink(tmpbundlefile)

    repo.destroyed()
    # return the backup file path (or None if 'backup' was False) so
    # extensions can use it
    return backupfile
257 |
|
257 | |||
def softstrip(ui, repo, nodelist, backup=True, topic='backup'):
    """perform a "soft" strip using the archived phase

    The changesets in ``nodelist`` and all of their descendants are moved
    to the archived phase instead of having their data removed.  A backup
    bundle is written first when ``backup`` is true.

    Returns the backup bundle path, or None when there is nothing to strip
    or no backup was requested.
    """
    tostrip = [c.node() for c in repo.set('sort(%ln::)', nodelist)]
    if not tostrip:
        return None

    newbmtarget, updatebm = _bookmarkmovements(repo, tostrip)
    # Must be bound unconditionally: leaving it assigned only under
    # ``if backup:`` made ``return backupfile`` raise UnboundLocalError
    # whenever backup=False.
    backupfile = None
    if backup:
        node = tostrip[0]
        backupfile = _createstripbackup(repo, tostrip, node, topic)

    with repo.transaction('strip') as tr:
        phases.retractboundary(repo, tr, phases.archived, tostrip)
        bmchanges = [(m, repo[newbmtarget].node()) for m in updatebm]
        repo._bookmarks.applychanges(repo, tr, bmchanges)
    return backupfile
274 |
|
274 | |||
275 |
|
275 | |||
276 | def _bookmarkmovements(repo, tostrip): |
|
276 | def _bookmarkmovements(repo, tostrip): | |
277 | # compute necessary bookmark movement |
|
277 | # compute necessary bookmark movement | |
278 | bm = repo._bookmarks |
|
278 | bm = repo._bookmarks | |
279 | updatebm = [] |
|
279 | updatebm = [] | |
280 | for m in bm: |
|
280 | for m in bm: | |
281 | rev = repo[bm[m]].rev() |
|
281 | rev = repo[bm[m]].rev() | |
282 | if rev in tostrip: |
|
282 | if rev in tostrip: | |
283 | updatebm.append(m) |
|
283 | updatebm.append(m) | |
284 | newbmtarget = None |
|
284 | newbmtarget = None | |
285 | # If we need to move bookmarks, compute bookmark |
|
285 | # If we need to move bookmarks, compute bookmark | |
286 | # targets. Otherwise we can skip doing this logic. |
|
286 | # targets. Otherwise we can skip doing this logic. | |
287 | if updatebm: |
|
287 | if updatebm: | |
288 | # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), |
|
288 | # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), | |
289 | # but is much faster |
|
289 | # but is much faster | |
290 | newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip) |
|
290 | newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip) | |
291 | if newbmtarget: |
|
291 | if newbmtarget: | |
292 | newbmtarget = repo[newbmtarget.first()].node() |
|
292 | newbmtarget = repo[newbmtarget.first()].node() | |
293 | else: |
|
293 | else: | |
294 | newbmtarget = '.' |
|
294 | newbmtarget = '.' | |
295 | return newbmtarget, updatebm |
|
295 | return newbmtarget, updatebm | |
296 |
|
296 | |||
def _createstripbackup(repo, stripbases, node, topic):
    """Write a backup bundle of the changesets about to be stripped,
    report its location to the user and the log, and return its path."""
    vfs = repo.vfs
    cl = repo.changelog
    backupfile = backupbundle(repo, stripbases, cl.heads(), node, topic)
    location = vfs.join(backupfile)
    repo.ui.status(_("saved backup bundle to %s\n") % location)
    repo.ui.log("backupbundle", "saved backup bundle to %s\n", location)
    return backupfile
307 |
|
307 | |||
def safestriproots(ui, repo, nodes):
    """return list of roots of nodes where descendants are covered by nodes"""
    torev = repo.unfiltered().changelog.rev
    wanted = {torev(n) for n in nodes}
    # tostrip = wanted - unsafe = wanted - ancestors(orphaned)
    # orphaned = affected - wanted
    # affected = descendants(roots(wanted))
    # wanted = revs
    revset = '%ld - ( ::( (roots(%ld):: and not _phase(%s)) -%ld) )'
    strippable = set(repo.revs(revset, wanted, wanted, phases.internal,
                               wanted))
    # anything requested but not safely strippable gets a warning
    skipped = wanted - strippable
    if skipped:
        nodestr = ', '.join(sorted(short(repo[n].node()) for n in skipped))
        ui.warn(_('warning: orphaned descendants detected, '
                  'not stripping %s\n') % nodestr)
    return [c.node() for c in repo.set('roots(%ld)', strippable)]
324 |
|
324 | |||
class stripcallback(object):
    """used as a transaction postclose callback"""

    def __init__(self, ui, repo, backup, topic):
        self.ui = ui
        self.repo = repo
        self.backup = backup
        # fall back to the default topic when none was given
        self.topic = topic or 'backup'
        self.nodelist = []

    def addnodes(self, nodes):
        """queue additional nodes to strip when the transaction closes"""
        self.nodelist.extend(nodes)

    def __call__(self, tr):
        # only the safely strippable roots of everything queued are stripped
        roots = safestriproots(self.ui, self.repo, self.nodelist)
        if roots:
            strip(self.ui, self.repo, roots, self.backup, self.topic)
342 |
|
342 | |||
def delayedstrip(ui, repo, nodelist, topic=None, backup=True):
    """like strip, but works inside transaction and won't strip irrelevant revs

    nodelist must explicitly contain all descendants. Otherwise a warning will
    be printed that some nodes are not stripped.

    Will do a backup if `backup` is True. The last non-None "topic" will be
    used as the backup topic name. The default backup topic name is "backup".
    """
    tr = repo.currenttransaction()
    if not tr:
        # no transaction running: strip right away
        return strip(ui, repo, safestriproots(ui, repo, nodelist),
                     backup=backup, topic=topic)
    # transaction postclose callbacks are called in alphabet order.
    # use '\xff' as prefix so we are likely to be called last.
    key = '\xffstrip'
    cb = tr.getpostclose(key)
    if cb is None:
        cb = stripcallback(ui, repo, backup=backup, topic=topic)
        tr.addpostclose(key, cb)
    if topic:
        cb.topic = topic
    cb.addnodes(nodelist)
365 |
|
365 | |||
def stripmanifest(repo, striprev, tr, files):
    """Strip the root manifest at ``striprev``, then any tree manifests."""
    repo.manifestlog.getstorage(b'').strip(striprev, tr)
    striptrees(repo, tr, striprev, files)
370 |
|
370 | |||
def striptrees(repo, tr, striprev, files):
    """Strip every per-directory (tree) manifest revlog at ``striprev``."""
    if 'treemanifest' not in repo.requirements:
        # This logic is safe if treemanifest isn't enabled, but also
        # pointless, so we skip it if treemanifest isn't enabled.
        return
    for unencoded, encoded, size in repo.store.datafiles():
        if (unencoded.startswith('meta/') and
            unencoded.endswith('00manifest.i')):
            # path is 'meta/<dir>00manifest.i'; slice out the directory
            treedir = unencoded[5:-12]
            repo.manifestlog.getstorage(treedir).strip(striprev, tr)
380 |
|
380 | |||
381 | def rebuildfncache(ui, repo): |
|
381 | def rebuildfncache(ui, repo): | |
382 | """Rebuilds the fncache file from repo history. |
|
382 | """Rebuilds the fncache file from repo history. | |
383 |
|
383 | |||
384 | Missing entries will be added. Extra entries will be removed. |
|
384 | Missing entries will be added. Extra entries will be removed. | |
385 | """ |
|
385 | """ | |
386 | repo = repo.unfiltered() |
|
386 | repo = repo.unfiltered() | |
387 |
|
387 | |||
388 | if 'fncache' not in repo.requirements: |
|
388 | if 'fncache' not in repo.requirements: | |
389 | ui.warn(_('(not rebuilding fncache because repository does not ' |
|
389 | ui.warn(_('(not rebuilding fncache because repository does not ' | |
390 | 'support fncache)\n')) |
|
390 | 'support fncache)\n')) | |
391 | return |
|
391 | return | |
392 |
|
392 | |||
393 | with repo.lock(): |
|
393 | with repo.lock(): | |
394 | fnc = repo.store.fncache |
|
394 | fnc = repo.store.fncache | |
395 | # Trigger load of fncache. |
|
395 | fnc.ensureloaded(warn=ui.warn) | |
396 | if 'irrelevant' in fnc: |
|
|||
397 | pass |
|
|||
398 |
|
396 | |||
399 | oldentries = set(fnc.entries) |
|
397 | oldentries = set(fnc.entries) | |
400 | newentries = set() |
|
398 | newentries = set() | |
401 | seenfiles = set() |
|
399 | seenfiles = set() | |
402 |
|
400 | |||
403 | progress = ui.makeprogress(_('rebuilding'), unit=_('changesets'), |
|
401 | progress = ui.makeprogress(_('rebuilding'), unit=_('changesets'), | |
404 | total=len(repo)) |
|
402 | total=len(repo)) | |
405 | for rev in repo: |
|
403 | for rev in repo: | |
406 | progress.update(rev) |
|
404 | progress.update(rev) | |
407 |
|
405 | |||
408 | ctx = repo[rev] |
|
406 | ctx = repo[rev] | |
409 | for f in ctx.files(): |
|
407 | for f in ctx.files(): | |
410 | # This is to minimize I/O. |
|
408 | # This is to minimize I/O. | |
411 | if f in seenfiles: |
|
409 | if f in seenfiles: | |
412 | continue |
|
410 | continue | |
413 | seenfiles.add(f) |
|
411 | seenfiles.add(f) | |
414 |
|
412 | |||
415 | i = 'data/%s.i' % f |
|
413 | i = 'data/%s.i' % f | |
416 | d = 'data/%s.d' % f |
|
414 | d = 'data/%s.d' % f | |
417 |
|
415 | |||
418 | if repo.store._exists(i): |
|
416 | if repo.store._exists(i): | |
419 | newentries.add(i) |
|
417 | newentries.add(i) | |
420 | if repo.store._exists(d): |
|
418 | if repo.store._exists(d): | |
421 | newentries.add(d) |
|
419 | newentries.add(d) | |
422 |
|
420 | |||
423 | progress.complete() |
|
421 | progress.complete() | |
424 |
|
422 | |||
425 | if 'treemanifest' in repo.requirements: |
|
423 | if 'treemanifest' in repo.requirements: | |
426 | # This logic is safe if treemanifest isn't enabled, but also |
|
424 | # This logic is safe if treemanifest isn't enabled, but also | |
427 | # pointless, so we skip it if treemanifest isn't enabled. |
|
425 | # pointless, so we skip it if treemanifest isn't enabled. | |
428 | for dir in util.dirs(seenfiles): |
|
426 | for dir in util.dirs(seenfiles): | |
429 | i = 'meta/%s/00manifest.i' % dir |
|
427 | i = 'meta/%s/00manifest.i' % dir | |
430 | d = 'meta/%s/00manifest.d' % dir |
|
428 | d = 'meta/%s/00manifest.d' % dir | |
431 |
|
429 | |||
432 | if repo.store._exists(i): |
|
430 | if repo.store._exists(i): | |
433 | newentries.add(i) |
|
431 | newentries.add(i) | |
434 | if repo.store._exists(d): |
|
432 | if repo.store._exists(d): | |
435 | newentries.add(d) |
|
433 | newentries.add(d) | |
436 |
|
434 | |||
437 | addcount = len(newentries - oldentries) |
|
435 | addcount = len(newentries - oldentries) | |
438 | removecount = len(oldentries - newentries) |
|
436 | removecount = len(oldentries - newentries) | |
439 | for p in sorted(oldentries - newentries): |
|
437 | for p in sorted(oldentries - newentries): | |
440 | ui.write(_('removing %s\n') % p) |
|
438 | ui.write(_('removing %s\n') % p) | |
441 | for p in sorted(newentries - oldentries): |
|
439 | for p in sorted(newentries - oldentries): | |
442 | ui.write(_('adding %s\n') % p) |
|
440 | ui.write(_('adding %s\n') % p) | |
443 |
|
441 | |||
444 | if addcount or removecount: |
|
442 | if addcount or removecount: | |
445 | ui.write(_('%d items added, %d removed from fncache\n') % |
|
443 | ui.write(_('%d items added, %d removed from fncache\n') % | |
446 | (addcount, removecount)) |
|
444 | (addcount, removecount)) | |
447 | fnc.entries = newentries |
|
445 | fnc.entries = newentries | |
448 | fnc._dirty = True |
|
446 | fnc._dirty = True | |
449 |
|
447 | |||
450 | with repo.transaction('fncache') as tr: |
|
448 | with repo.transaction('fncache') as tr: | |
451 | fnc.write(tr) |
|
449 | fnc.write(tr) | |
452 | else: |
|
450 | else: | |
453 | ui.write(_('fncache already up to date\n')) |
|
451 | ui.write(_('fncache already up to date\n')) | |
454 |
|
452 | |||
455 | def deleteobsmarkers(obsstore, indices): |
|
453 | def deleteobsmarkers(obsstore, indices): | |
456 | """Delete some obsmarkers from obsstore and return how many were deleted |
|
454 | """Delete some obsmarkers from obsstore and return how many were deleted | |
457 |
|
455 | |||
458 | 'indices' is a list of ints which are the indices |
|
456 | 'indices' is a list of ints which are the indices | |
459 | of the markers to be deleted. |
|
457 | of the markers to be deleted. | |
460 |
|
458 | |||
461 | Every invocation of this function completely rewrites the obsstore file, |
|
459 | Every invocation of this function completely rewrites the obsstore file, | |
462 | skipping the markers we want to be removed. The new temporary file is |
|
460 | skipping the markers we want to be removed. The new temporary file is | |
463 | created, remaining markers are written there and on .close() this file |
|
461 | created, remaining markers are written there and on .close() this file | |
464 | gets atomically renamed to obsstore, thus guaranteeing consistency.""" |
|
462 | gets atomically renamed to obsstore, thus guaranteeing consistency.""" | |
465 | if not indices: |
|
463 | if not indices: | |
466 | # we don't want to rewrite the obsstore with the same content |
|
464 | # we don't want to rewrite the obsstore with the same content | |
467 | return |
|
465 | return | |
468 |
|
466 | |||
469 | left = [] |
|
467 | left = [] | |
470 | current = obsstore._all |
|
468 | current = obsstore._all | |
471 | n = 0 |
|
469 | n = 0 | |
472 | for i, m in enumerate(current): |
|
470 | for i, m in enumerate(current): | |
473 | if i in indices: |
|
471 | if i in indices: | |
474 | n += 1 |
|
472 | n += 1 | |
475 | continue |
|
473 | continue | |
476 | left.append(m) |
|
474 | left.append(m) | |
477 |
|
475 | |||
478 | newobsstorefile = obsstore.svfs('obsstore', 'w', atomictemp=True) |
|
476 | newobsstorefile = obsstore.svfs('obsstore', 'w', atomictemp=True) | |
479 | for bytes in obsolete.encodemarkers(left, True, obsstore._version): |
|
477 | for bytes in obsolete.encodemarkers(left, True, obsstore._version): | |
480 | newobsstorefile.write(bytes) |
|
478 | newobsstorefile.write(bytes) | |
481 | newobsstorefile.close() |
|
479 | newobsstorefile.close() | |
482 | return n |
|
480 | return n |
@@ -1,654 +1,669 b'' | |||||
1 | # store.py - repository store handling for Mercurial |
|
1 | # store.py - repository store handling for Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2008 Matt Mackall <mpm@selenic.com> |
|
3 | # Copyright 2008 Matt Mackall <mpm@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | from __future__ import absolute_import |
|
8 | from __future__ import absolute_import | |
9 |
|
9 | |||
10 | import errno |
|
10 | import errno | |
11 | import functools |
|
11 | import functools | |
12 | import hashlib |
|
12 | import hashlib | |
13 | import os |
|
13 | import os | |
14 | import stat |
|
14 | import stat | |
15 |
|
15 | |||
16 | from .i18n import _ |
|
16 | from .i18n import _ | |
17 | from . import ( |
|
17 | from . import ( | |
18 | error, |
|
18 | error, | |
19 | node, |
|
19 | node, | |
20 | policy, |
|
20 | policy, | |
21 | pycompat, |
|
21 | pycompat, | |
22 | util, |
|
22 | util, | |
23 | vfs as vfsmod, |
|
23 | vfs as vfsmod, | |
24 | ) |
|
24 | ) | |
25 |
|
25 | |||
26 | parsers = policy.importmod(r'parsers') |
|
26 | parsers = policy.importmod(r'parsers') | |
27 | # how much bytes should be read from fncache in one read |
|
27 | # how much bytes should be read from fncache in one read | |
28 | # It is done to prevent loading large fncache files into memory |
|
28 | # It is done to prevent loading large fncache files into memory | |
29 | fncache_chunksize = 10 ** 6 |
|
29 | fncache_chunksize = 10 ** 6 | |
30 |
|
30 | |||
31 | def _matchtrackedpath(path, matcher): |
|
31 | def _matchtrackedpath(path, matcher): | |
32 | """parses a fncache entry and returns whether the entry is tracking a path |
|
32 | """parses a fncache entry and returns whether the entry is tracking a path | |
33 | matched by matcher or not. |
|
33 | matched by matcher or not. | |
34 |
|
34 | |||
35 | If matcher is None, returns True""" |
|
35 | If matcher is None, returns True""" | |
36 |
|
36 | |||
37 | if matcher is None: |
|
37 | if matcher is None: | |
38 | return True |
|
38 | return True | |
39 | path = decodedir(path) |
|
39 | path = decodedir(path) | |
40 | if path.startswith('data/'): |
|
40 | if path.startswith('data/'): | |
41 | return matcher(path[len('data/'):-len('.i')]) |
|
41 | return matcher(path[len('data/'):-len('.i')]) | |
42 | elif path.startswith('meta/'): |
|
42 | elif path.startswith('meta/'): | |
43 | return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')]) |
|
43 | return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')]) | |
44 |
|
44 | |||
45 | raise error.ProgrammingError("cannot decode path %s" % path) |
|
45 | raise error.ProgrammingError("cannot decode path %s" % path) | |
46 |
|
46 | |||
47 | # This avoids a collision between a file named foo and a dir named |
|
47 | # This avoids a collision between a file named foo and a dir named | |
48 | # foo.i or foo.d |
|
48 | # foo.i or foo.d | |
49 | def _encodedir(path): |
|
49 | def _encodedir(path): | |
50 | ''' |
|
50 | ''' | |
51 | >>> _encodedir(b'data/foo.i') |
|
51 | >>> _encodedir(b'data/foo.i') | |
52 | 'data/foo.i' |
|
52 | 'data/foo.i' | |
53 | >>> _encodedir(b'data/foo.i/bla.i') |
|
53 | >>> _encodedir(b'data/foo.i/bla.i') | |
54 | 'data/foo.i.hg/bla.i' |
|
54 | 'data/foo.i.hg/bla.i' | |
55 | >>> _encodedir(b'data/foo.i.hg/bla.i') |
|
55 | >>> _encodedir(b'data/foo.i.hg/bla.i') | |
56 | 'data/foo.i.hg.hg/bla.i' |
|
56 | 'data/foo.i.hg.hg/bla.i' | |
57 | >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n') |
|
57 | >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n') | |
58 | 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n' |
|
58 | 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n' | |
59 | ''' |
|
59 | ''' | |
60 | return (path |
|
60 | return (path | |
61 | .replace(".hg/", ".hg.hg/") |
|
61 | .replace(".hg/", ".hg.hg/") | |
62 | .replace(".i/", ".i.hg/") |
|
62 | .replace(".i/", ".i.hg/") | |
63 | .replace(".d/", ".d.hg/")) |
|
63 | .replace(".d/", ".d.hg/")) | |
64 |
|
64 | |||
65 | encodedir = getattr(parsers, 'encodedir', _encodedir) |
|
65 | encodedir = getattr(parsers, 'encodedir', _encodedir) | |
66 |
|
66 | |||
67 | def decodedir(path): |
|
67 | def decodedir(path): | |
68 | ''' |
|
68 | ''' | |
69 | >>> decodedir(b'data/foo.i') |
|
69 | >>> decodedir(b'data/foo.i') | |
70 | 'data/foo.i' |
|
70 | 'data/foo.i' | |
71 | >>> decodedir(b'data/foo.i.hg/bla.i') |
|
71 | >>> decodedir(b'data/foo.i.hg/bla.i') | |
72 | 'data/foo.i/bla.i' |
|
72 | 'data/foo.i/bla.i' | |
73 | >>> decodedir(b'data/foo.i.hg.hg/bla.i') |
|
73 | >>> decodedir(b'data/foo.i.hg.hg/bla.i') | |
74 | 'data/foo.i.hg/bla.i' |
|
74 | 'data/foo.i.hg/bla.i' | |
75 | ''' |
|
75 | ''' | |
76 | if ".hg/" not in path: |
|
76 | if ".hg/" not in path: | |
77 | return path |
|
77 | return path | |
78 | return (path |
|
78 | return (path | |
79 | .replace(".d.hg/", ".d/") |
|
79 | .replace(".d.hg/", ".d/") | |
80 | .replace(".i.hg/", ".i/") |
|
80 | .replace(".i.hg/", ".i/") | |
81 | .replace(".hg.hg/", ".hg/")) |
|
81 | .replace(".hg.hg/", ".hg/")) | |
82 |
|
82 | |||
83 | def _reserved(): |
|
83 | def _reserved(): | |
84 | ''' characters that are problematic for filesystems |
|
84 | ''' characters that are problematic for filesystems | |
85 |
|
85 | |||
86 | * ascii escapes (0..31) |
|
86 | * ascii escapes (0..31) | |
87 | * ascii hi (126..255) |
|
87 | * ascii hi (126..255) | |
88 | * windows specials |
|
88 | * windows specials | |
89 |
|
89 | |||
90 | these characters will be escaped by encodefunctions |
|
90 | these characters will be escaped by encodefunctions | |
91 | ''' |
|
91 | ''' | |
92 | winreserved = [ord(x) for x in u'\\:*?"<>|'] |
|
92 | winreserved = [ord(x) for x in u'\\:*?"<>|'] | |
93 | for x in range(32): |
|
93 | for x in range(32): | |
94 | yield x |
|
94 | yield x | |
95 | for x in range(126, 256): |
|
95 | for x in range(126, 256): | |
96 | yield x |
|
96 | yield x | |
97 | for x in winreserved: |
|
97 | for x in winreserved: | |
98 | yield x |
|
98 | yield x | |
99 |
|
99 | |||
100 | def _buildencodefun(): |
|
100 | def _buildencodefun(): | |
101 | ''' |
|
101 | ''' | |
102 | >>> enc, dec = _buildencodefun() |
|
102 | >>> enc, dec = _buildencodefun() | |
103 |
|
103 | |||
104 | >>> enc(b'nothing/special.txt') |
|
104 | >>> enc(b'nothing/special.txt') | |
105 | 'nothing/special.txt' |
|
105 | 'nothing/special.txt' | |
106 | >>> dec(b'nothing/special.txt') |
|
106 | >>> dec(b'nothing/special.txt') | |
107 | 'nothing/special.txt' |
|
107 | 'nothing/special.txt' | |
108 |
|
108 | |||
109 | >>> enc(b'HELLO') |
|
109 | >>> enc(b'HELLO') | |
110 | '_h_e_l_l_o' |
|
110 | '_h_e_l_l_o' | |
111 | >>> dec(b'_h_e_l_l_o') |
|
111 | >>> dec(b'_h_e_l_l_o') | |
112 | 'HELLO' |
|
112 | 'HELLO' | |
113 |
|
113 | |||
114 | >>> enc(b'hello:world?') |
|
114 | >>> enc(b'hello:world?') | |
115 | 'hello~3aworld~3f' |
|
115 | 'hello~3aworld~3f' | |
116 | >>> dec(b'hello~3aworld~3f') |
|
116 | >>> dec(b'hello~3aworld~3f') | |
117 | 'hello:world?' |
|
117 | 'hello:world?' | |
118 |
|
118 | |||
119 | >>> enc(b'the\\x07quick\\xADshot') |
|
119 | >>> enc(b'the\\x07quick\\xADshot') | |
120 | 'the~07quick~adshot' |
|
120 | 'the~07quick~adshot' | |
121 | >>> dec(b'the~07quick~adshot') |
|
121 | >>> dec(b'the~07quick~adshot') | |
122 | 'the\\x07quick\\xadshot' |
|
122 | 'the\\x07quick\\xadshot' | |
123 | ''' |
|
123 | ''' | |
124 | e = '_' |
|
124 | e = '_' | |
125 | xchr = pycompat.bytechr |
|
125 | xchr = pycompat.bytechr | |
126 | asciistr = list(map(xchr, range(127))) |
|
126 | asciistr = list(map(xchr, range(127))) | |
127 | capitals = list(range(ord("A"), ord("Z") + 1)) |
|
127 | capitals = list(range(ord("A"), ord("Z") + 1)) | |
128 |
|
128 | |||
129 | cmap = dict((x, x) for x in asciistr) |
|
129 | cmap = dict((x, x) for x in asciistr) | |
130 | for x in _reserved(): |
|
130 | for x in _reserved(): | |
131 | cmap[xchr(x)] = "~%02x" % x |
|
131 | cmap[xchr(x)] = "~%02x" % x | |
132 | for x in capitals + [ord(e)]: |
|
132 | for x in capitals + [ord(e)]: | |
133 | cmap[xchr(x)] = e + xchr(x).lower() |
|
133 | cmap[xchr(x)] = e + xchr(x).lower() | |
134 |
|
134 | |||
135 | dmap = {} |
|
135 | dmap = {} | |
136 | for k, v in cmap.iteritems(): |
|
136 | for k, v in cmap.iteritems(): | |
137 | dmap[v] = k |
|
137 | dmap[v] = k | |
138 | def decode(s): |
|
138 | def decode(s): | |
139 | i = 0 |
|
139 | i = 0 | |
140 | while i < len(s): |
|
140 | while i < len(s): | |
141 | for l in pycompat.xrange(1, 4): |
|
141 | for l in pycompat.xrange(1, 4): | |
142 | try: |
|
142 | try: | |
143 | yield dmap[s[i:i + l]] |
|
143 | yield dmap[s[i:i + l]] | |
144 | i += l |
|
144 | i += l | |
145 | break |
|
145 | break | |
146 | except KeyError: |
|
146 | except KeyError: | |
147 | pass |
|
147 | pass | |
148 | else: |
|
148 | else: | |
149 | raise KeyError |
|
149 | raise KeyError | |
150 | return (lambda s: ''.join([cmap[s[c:c + 1]] |
|
150 | return (lambda s: ''.join([cmap[s[c:c + 1]] | |
151 | for c in pycompat.xrange(len(s))]), |
|
151 | for c in pycompat.xrange(len(s))]), | |
152 | lambda s: ''.join(list(decode(s)))) |
|
152 | lambda s: ''.join(list(decode(s)))) | |
153 |
|
153 | |||
154 | _encodefname, _decodefname = _buildencodefun() |
|
154 | _encodefname, _decodefname = _buildencodefun() | |
155 |
|
155 | |||
156 | def encodefilename(s): |
|
156 | def encodefilename(s): | |
157 | ''' |
|
157 | ''' | |
158 | >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO') |
|
158 | >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO') | |
159 | 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o' |
|
159 | 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o' | |
160 | ''' |
|
160 | ''' | |
161 | return _encodefname(encodedir(s)) |
|
161 | return _encodefname(encodedir(s)) | |
162 |
|
162 | |||
163 | def decodefilename(s): |
|
163 | def decodefilename(s): | |
164 | ''' |
|
164 | ''' | |
165 | >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o') |
|
165 | >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o') | |
166 | 'foo.i/bar.d/bla.hg/hi:world?/HELLO' |
|
166 | 'foo.i/bar.d/bla.hg/hi:world?/HELLO' | |
167 | ''' |
|
167 | ''' | |
168 | return decodedir(_decodefname(s)) |
|
168 | return decodedir(_decodefname(s)) | |
169 |
|
169 | |||
170 | def _buildlowerencodefun(): |
|
170 | def _buildlowerencodefun(): | |
171 | ''' |
|
171 | ''' | |
172 | >>> f = _buildlowerencodefun() |
|
172 | >>> f = _buildlowerencodefun() | |
173 | >>> f(b'nothing/special.txt') |
|
173 | >>> f(b'nothing/special.txt') | |
174 | 'nothing/special.txt' |
|
174 | 'nothing/special.txt' | |
175 | >>> f(b'HELLO') |
|
175 | >>> f(b'HELLO') | |
176 | 'hello' |
|
176 | 'hello' | |
177 | >>> f(b'hello:world?') |
|
177 | >>> f(b'hello:world?') | |
178 | 'hello~3aworld~3f' |
|
178 | 'hello~3aworld~3f' | |
179 | >>> f(b'the\\x07quick\\xADshot') |
|
179 | >>> f(b'the\\x07quick\\xADshot') | |
180 | 'the~07quick~adshot' |
|
180 | 'the~07quick~adshot' | |
181 | ''' |
|
181 | ''' | |
182 | xchr = pycompat.bytechr |
|
182 | xchr = pycompat.bytechr | |
183 | cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)]) |
|
183 | cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)]) | |
184 | for x in _reserved(): |
|
184 | for x in _reserved(): | |
185 | cmap[xchr(x)] = "~%02x" % x |
|
185 | cmap[xchr(x)] = "~%02x" % x | |
186 | for x in range(ord("A"), ord("Z") + 1): |
|
186 | for x in range(ord("A"), ord("Z") + 1): | |
187 | cmap[xchr(x)] = xchr(x).lower() |
|
187 | cmap[xchr(x)] = xchr(x).lower() | |
188 | def lowerencode(s): |
|
188 | def lowerencode(s): | |
189 | return "".join([cmap[c] for c in pycompat.iterbytestr(s)]) |
|
189 | return "".join([cmap[c] for c in pycompat.iterbytestr(s)]) | |
190 | return lowerencode |
|
190 | return lowerencode | |
191 |
|
191 | |||
192 | lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun() |
|
192 | lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun() | |
193 |
|
193 | |||
194 | # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9 |
|
194 | # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9 | |
195 | _winres3 = ('aux', 'con', 'prn', 'nul') # length 3 |
|
195 | _winres3 = ('aux', 'con', 'prn', 'nul') # length 3 | |
196 | _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9) |
|
196 | _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9) | |
197 | def _auxencode(path, dotencode): |
|
197 | def _auxencode(path, dotencode): | |
198 | ''' |
|
198 | ''' | |
199 | Encodes filenames containing names reserved by Windows or which end in |
|
199 | Encodes filenames containing names reserved by Windows or which end in | |
200 | period or space. Does not touch other single reserved characters c. |
|
200 | period or space. Does not touch other single reserved characters c. | |
201 | Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here. |
|
201 | Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here. | |
202 | Additionally encodes space or period at the beginning, if dotencode is |
|
202 | Additionally encodes space or period at the beginning, if dotencode is | |
203 | True. Parameter path is assumed to be all lowercase. |
|
203 | True. Parameter path is assumed to be all lowercase. | |
204 | A segment only needs encoding if a reserved name appears as a |
|
204 | A segment only needs encoding if a reserved name appears as a | |
205 | basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux" |
|
205 | basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux" | |
206 | doesn't need encoding. |
|
206 | doesn't need encoding. | |
207 |
|
207 | |||
208 | >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.' |
|
208 | >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.' | |
209 | >>> _auxencode(s.split(b'/'), True) |
|
209 | >>> _auxencode(s.split(b'/'), True) | |
210 | ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e'] |
|
210 | ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e'] | |
211 | >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.' |
|
211 | >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.' | |
212 | >>> _auxencode(s.split(b'/'), False) |
|
212 | >>> _auxencode(s.split(b'/'), False) | |
213 | ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e'] |
|
213 | ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e'] | |
214 | >>> _auxencode([b'foo. '], True) |
|
214 | >>> _auxencode([b'foo. '], True) | |
215 | ['foo.~20'] |
|
215 | ['foo.~20'] | |
216 | >>> _auxencode([b' .foo'], True) |
|
216 | >>> _auxencode([b' .foo'], True) | |
217 | ['~20.foo'] |
|
217 | ['~20.foo'] | |
218 | ''' |
|
218 | ''' | |
219 | for i, n in enumerate(path): |
|
219 | for i, n in enumerate(path): | |
220 | if not n: |
|
220 | if not n: | |
221 | continue |
|
221 | continue | |
222 | if dotencode and n[0] in '. ': |
|
222 | if dotencode and n[0] in '. ': | |
223 | n = "~%02x" % ord(n[0:1]) + n[1:] |
|
223 | n = "~%02x" % ord(n[0:1]) + n[1:] | |
224 | path[i] = n |
|
224 | path[i] = n | |
225 | else: |
|
225 | else: | |
226 | l = n.find('.') |
|
226 | l = n.find('.') | |
227 | if l == -1: |
|
227 | if l == -1: | |
228 | l = len(n) |
|
228 | l = len(n) | |
229 | if ((l == 3 and n[:3] in _winres3) or |
|
229 | if ((l == 3 and n[:3] in _winres3) or | |
230 | (l == 4 and n[3:4] <= '9' and n[3:4] >= '1' |
|
230 | (l == 4 and n[3:4] <= '9' and n[3:4] >= '1' | |
231 | and n[:3] in _winres4)): |
|
231 | and n[:3] in _winres4)): | |
232 | # encode third letter ('aux' -> 'au~78') |
|
232 | # encode third letter ('aux' -> 'au~78') | |
233 | ec = "~%02x" % ord(n[2:3]) |
|
233 | ec = "~%02x" % ord(n[2:3]) | |
234 | n = n[0:2] + ec + n[3:] |
|
234 | n = n[0:2] + ec + n[3:] | |
235 | path[i] = n |
|
235 | path[i] = n | |
236 | if n[-1] in '. ': |
|
236 | if n[-1] in '. ': | |
237 | # encode last period or space ('foo...' -> 'foo..~2e') |
|
237 | # encode last period or space ('foo...' -> 'foo..~2e') | |
238 | path[i] = n[:-1] + "~%02x" % ord(n[-1:]) |
|
238 | path[i] = n[:-1] + "~%02x" % ord(n[-1:]) | |
239 | return path |
|
239 | return path | |
240 |
|
240 | |||
241 | _maxstorepathlen = 120 |
|
241 | _maxstorepathlen = 120 | |
242 | _dirprefixlen = 8 |
|
242 | _dirprefixlen = 8 | |
243 | _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4 |
|
243 | _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4 | |
244 |
|
244 | |||
245 | def _hashencode(path, dotencode): |
|
245 | def _hashencode(path, dotencode): | |
246 | digest = node.hex(hashlib.sha1(path).digest()) |
|
246 | digest = node.hex(hashlib.sha1(path).digest()) | |
247 | le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/' |
|
247 | le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/' | |
248 | parts = _auxencode(le, dotencode) |
|
248 | parts = _auxencode(le, dotencode) | |
249 | basename = parts[-1] |
|
249 | basename = parts[-1] | |
250 | _root, ext = os.path.splitext(basename) |
|
250 | _root, ext = os.path.splitext(basename) | |
251 | sdirs = [] |
|
251 | sdirs = [] | |
252 | sdirslen = 0 |
|
252 | sdirslen = 0 | |
253 | for p in parts[:-1]: |
|
253 | for p in parts[:-1]: | |
254 | d = p[:_dirprefixlen] |
|
254 | d = p[:_dirprefixlen] | |
255 | if d[-1] in '. ': |
|
255 | if d[-1] in '. ': | |
256 | # Windows can't access dirs ending in period or space |
|
256 | # Windows can't access dirs ending in period or space | |
257 | d = d[:-1] + '_' |
|
257 | d = d[:-1] + '_' | |
258 | if sdirslen == 0: |
|
258 | if sdirslen == 0: | |
259 | t = len(d) |
|
259 | t = len(d) | |
260 | else: |
|
260 | else: | |
261 | t = sdirslen + 1 + len(d) |
|
261 | t = sdirslen + 1 + len(d) | |
262 | if t > _maxshortdirslen: |
|
262 | if t > _maxshortdirslen: | |
263 | break |
|
263 | break | |
264 | sdirs.append(d) |
|
264 | sdirs.append(d) | |
265 | sdirslen = t |
|
265 | sdirslen = t | |
266 | dirs = '/'.join(sdirs) |
|
266 | dirs = '/'.join(sdirs) | |
267 | if len(dirs) > 0: |
|
267 | if len(dirs) > 0: | |
268 | dirs += '/' |
|
268 | dirs += '/' | |
269 | res = 'dh/' + dirs + digest + ext |
|
269 | res = 'dh/' + dirs + digest + ext | |
270 | spaceleft = _maxstorepathlen - len(res) |
|
270 | spaceleft = _maxstorepathlen - len(res) | |
271 | if spaceleft > 0: |
|
271 | if spaceleft > 0: | |
272 | filler = basename[:spaceleft] |
|
272 | filler = basename[:spaceleft] | |
273 | res = 'dh/' + dirs + filler + digest + ext |
|
273 | res = 'dh/' + dirs + filler + digest + ext | |
274 | return res |
|
274 | return res | |
275 |
|
275 | |||
276 | def _hybridencode(path, dotencode): |
|
276 | def _hybridencode(path, dotencode): | |
277 | '''encodes path with a length limit |
|
277 | '''encodes path with a length limit | |
278 |
|
278 | |||
279 | Encodes all paths that begin with 'data/', according to the following. |
|
279 | Encodes all paths that begin with 'data/', according to the following. | |
280 |
|
280 | |||
281 | Default encoding (reversible): |
|
281 | Default encoding (reversible): | |
282 |
|
282 | |||
283 | Encodes all uppercase letters 'X' as '_x'. All reserved or illegal |
|
283 | Encodes all uppercase letters 'X' as '_x'. All reserved or illegal | |
284 | characters are encoded as '~xx', where xx is the two digit hex code |
|
284 | characters are encoded as '~xx', where xx is the two digit hex code | |
285 | of the character (see encodefilename). |
|
285 | of the character (see encodefilename). | |
286 | Relevant path components consisting of Windows reserved filenames are |
|
286 | Relevant path components consisting of Windows reserved filenames are | |
287 | masked by encoding the third character ('aux' -> 'au~78', see _auxencode). |
|
287 | masked by encoding the third character ('aux' -> 'au~78', see _auxencode). | |
288 |
|
288 | |||
289 | Hashed encoding (not reversible): |
|
289 | Hashed encoding (not reversible): | |
290 |
|
290 | |||
291 | If the default-encoded path is longer than _maxstorepathlen, a |
|
291 | If the default-encoded path is longer than _maxstorepathlen, a | |
292 | non-reversible hybrid hashing of the path is done instead. |
|
292 | non-reversible hybrid hashing of the path is done instead. | |
293 | This encoding uses up to _dirprefixlen characters of all directory |
|
293 | This encoding uses up to _dirprefixlen characters of all directory | |
294 | levels of the lowerencoded path, but not more levels than can fit into |
|
294 | levels of the lowerencoded path, but not more levels than can fit into | |
295 | _maxshortdirslen. |
|
295 | _maxshortdirslen. | |
296 | Then follows the filler followed by the sha digest of the full path. |
|
296 | Then follows the filler followed by the sha digest of the full path. | |
297 | The filler is the beginning of the basename of the lowerencoded path |
|
297 | The filler is the beginning of the basename of the lowerencoded path | |
298 | (the basename is everything after the last path separator). The filler |
|
298 | (the basename is everything after the last path separator). The filler | |
299 | is as long as possible, filling in characters from the basename until |
|
299 | is as long as possible, filling in characters from the basename until | |
300 | the encoded path has _maxstorepathlen characters (or all chars of the |
|
300 | the encoded path has _maxstorepathlen characters (or all chars of the | |
301 | basename have been taken). |
|
301 | basename have been taken). | |
302 | The extension (e.g. '.i' or '.d') is preserved. |
|
302 | The extension (e.g. '.i' or '.d') is preserved. | |
303 |
|
303 | |||
304 | The string 'data/' at the beginning is replaced with 'dh/', if the hashed |
|
304 | The string 'data/' at the beginning is replaced with 'dh/', if the hashed | |
305 | encoding was used. |
|
305 | encoding was used. | |
306 | ''' |
|
306 | ''' | |
307 | path = encodedir(path) |
|
307 | path = encodedir(path) | |
308 | ef = _encodefname(path).split('/') |
|
308 | ef = _encodefname(path).split('/') | |
309 | res = '/'.join(_auxencode(ef, dotencode)) |
|
309 | res = '/'.join(_auxencode(ef, dotencode)) | |
310 | if len(res) > _maxstorepathlen: |
|
310 | if len(res) > _maxstorepathlen: | |
311 | res = _hashencode(path, dotencode) |
|
311 | res = _hashencode(path, dotencode) | |
312 | return res |
|
312 | return res | |
313 |
|
313 | |||
314 | def _pathencode(path): |
|
314 | def _pathencode(path): | |
315 | de = encodedir(path) |
|
315 | de = encodedir(path) | |
316 | if len(path) > _maxstorepathlen: |
|
316 | if len(path) > _maxstorepathlen: | |
317 | return _hashencode(de, True) |
|
317 | return _hashencode(de, True) | |
318 | ef = _encodefname(de).split('/') |
|
318 | ef = _encodefname(de).split('/') | |
319 | res = '/'.join(_auxencode(ef, True)) |
|
319 | res = '/'.join(_auxencode(ef, True)) | |
320 | if len(res) > _maxstorepathlen: |
|
320 | if len(res) > _maxstorepathlen: | |
321 | return _hashencode(de, True) |
|
321 | return _hashencode(de, True) | |
322 | return res |
|
322 | return res | |
323 |
|
323 | |||
324 | _pathencode = getattr(parsers, 'pathencode', _pathencode) |
|
324 | _pathencode = getattr(parsers, 'pathencode', _pathencode) | |
325 |
|
325 | |||
326 | def _plainhybridencode(f): |
|
326 | def _plainhybridencode(f): | |
327 | return _hybridencode(f, False) |
|
327 | return _hybridencode(f, False) | |
328 |
|
328 | |||
329 | def _calcmode(vfs): |
|
329 | def _calcmode(vfs): | |
330 | try: |
|
330 | try: | |
331 | # files in .hg/ will be created using this mode |
|
331 | # files in .hg/ will be created using this mode | |
332 | mode = vfs.stat().st_mode |
|
332 | mode = vfs.stat().st_mode | |
333 | # avoid some useless chmods |
|
333 | # avoid some useless chmods | |
334 | if (0o777 & ~util.umask) == (0o777 & mode): |
|
334 | if (0o777 & ~util.umask) == (0o777 & mode): | |
335 | mode = None |
|
335 | mode = None | |
336 | except OSError: |
|
336 | except OSError: | |
337 | mode = None |
|
337 | mode = None | |
338 | return mode |
|
338 | return mode | |
339 |
|
339 | |||
340 | _data = ('bookmarks narrowspec data meta 00manifest.d 00manifest.i' |
|
340 | _data = ('bookmarks narrowspec data meta 00manifest.d 00manifest.i' | |
341 | ' 00changelog.d 00changelog.i phaseroots obsstore') |
|
341 | ' 00changelog.d 00changelog.i phaseroots obsstore') | |
342 |
|
342 | |||
343 | def isrevlog(f, kind, st): |
|
343 | def isrevlog(f, kind, st): | |
344 | return kind == stat.S_IFREG and f[-2:] in ('.i', '.d') |
|
344 | return kind == stat.S_IFREG and f[-2:] in ('.i', '.d') | |
345 |
|
345 | |||
346 | class basicstore(object): |
|
346 | class basicstore(object): | |
347 | '''base class for local repository stores''' |
|
347 | '''base class for local repository stores''' | |
348 | def __init__(self, path, vfstype): |
|
348 | def __init__(self, path, vfstype): | |
349 | vfs = vfstype(path) |
|
349 | vfs = vfstype(path) | |
350 | self.path = vfs.base |
|
350 | self.path = vfs.base | |
351 | self.createmode = _calcmode(vfs) |
|
351 | self.createmode = _calcmode(vfs) | |
352 | vfs.createmode = self.createmode |
|
352 | vfs.createmode = self.createmode | |
353 | self.rawvfs = vfs |
|
353 | self.rawvfs = vfs | |
354 | self.vfs = vfsmod.filtervfs(vfs, encodedir) |
|
354 | self.vfs = vfsmod.filtervfs(vfs, encodedir) | |
355 | self.opener = self.vfs |
|
355 | self.opener = self.vfs | |
356 |
|
356 | |||
357 | def join(self, f): |
|
357 | def join(self, f): | |
358 | return self.path + '/' + encodedir(f) |
|
358 | return self.path + '/' + encodedir(f) | |
359 |
|
359 | |||
360 | def _walk(self, relpath, recurse, filefilter=isrevlog): |
|
360 | def _walk(self, relpath, recurse, filefilter=isrevlog): | |
361 | '''yields (unencoded, encoded, size)''' |
|
361 | '''yields (unencoded, encoded, size)''' | |
362 | path = self.path |
|
362 | path = self.path | |
363 | if relpath: |
|
363 | if relpath: | |
364 | path += '/' + relpath |
|
364 | path += '/' + relpath | |
365 | striplen = len(self.path) + 1 |
|
365 | striplen = len(self.path) + 1 | |
366 | l = [] |
|
366 | l = [] | |
367 | if self.rawvfs.isdir(path): |
|
367 | if self.rawvfs.isdir(path): | |
368 | visit = [path] |
|
368 | visit = [path] | |
369 | readdir = self.rawvfs.readdir |
|
369 | readdir = self.rawvfs.readdir | |
370 | while visit: |
|
370 | while visit: | |
371 | p = visit.pop() |
|
371 | p = visit.pop() | |
372 | for f, kind, st in readdir(p, stat=True): |
|
372 | for f, kind, st in readdir(p, stat=True): | |
373 | fp = p + '/' + f |
|
373 | fp = p + '/' + f | |
374 | if filefilter(f, kind, st): |
|
374 | if filefilter(f, kind, st): | |
375 | n = util.pconvert(fp[striplen:]) |
|
375 | n = util.pconvert(fp[striplen:]) | |
376 | l.append((decodedir(n), n, st.st_size)) |
|
376 | l.append((decodedir(n), n, st.st_size)) | |
377 | elif kind == stat.S_IFDIR and recurse: |
|
377 | elif kind == stat.S_IFDIR and recurse: | |
378 | visit.append(fp) |
|
378 | visit.append(fp) | |
379 | l.sort() |
|
379 | l.sort() | |
380 | return l |
|
380 | return l | |
381 |
|
381 | |||
382 | def datafiles(self, matcher=None): |
|
382 | def datafiles(self, matcher=None): | |
383 | return self._walk('data', True) + self._walk('meta', True) |
|
383 | return self._walk('data', True) + self._walk('meta', True) | |
384 |
|
384 | |||
385 | def topfiles(self): |
|
385 | def topfiles(self): | |
386 | # yield manifest before changelog |
|
386 | # yield manifest before changelog | |
387 | return reversed(self._walk('', False)) |
|
387 | return reversed(self._walk('', False)) | |
388 |
|
388 | |||
389 | def walk(self, matcher=None): |
|
389 | def walk(self, matcher=None): | |
390 | '''yields (unencoded, encoded, size) |
|
390 | '''yields (unencoded, encoded, size) | |
391 |
|
391 | |||
392 | if a matcher is passed, storage files of only those tracked paths |
|
392 | if a matcher is passed, storage files of only those tracked paths | |
393 | are passed with matches the matcher |
|
393 | are passed with matches the matcher | |
394 | ''' |
|
394 | ''' | |
395 | # yield data files first |
|
395 | # yield data files first | |
396 | for x in self.datafiles(matcher): |
|
396 | for x in self.datafiles(matcher): | |
397 | yield x |
|
397 | yield x | |
398 | for x in self.topfiles(): |
|
398 | for x in self.topfiles(): | |
399 | yield x |
|
399 | yield x | |
400 |
|
400 | |||
401 | def copylist(self): |
|
401 | def copylist(self): | |
402 | return ['requires'] + _data.split() |
|
402 | return ['requires'] + _data.split() | |
403 |
|
403 | |||
404 | def write(self, tr): |
|
404 | def write(self, tr): | |
405 | pass |
|
405 | pass | |
406 |
|
406 | |||
407 | def invalidatecaches(self): |
|
407 | def invalidatecaches(self): | |
408 | pass |
|
408 | pass | |
409 |
|
409 | |||
410 | def markremoved(self, fn): |
|
410 | def markremoved(self, fn): | |
411 | pass |
|
411 | pass | |
412 |
|
412 | |||
413 | def __contains__(self, path): |
|
413 | def __contains__(self, path): | |
414 | '''Checks if the store contains path''' |
|
414 | '''Checks if the store contains path''' | |
415 | path = "/".join(("data", path)) |
|
415 | path = "/".join(("data", path)) | |
416 | # file? |
|
416 | # file? | |
417 | if self.vfs.exists(path + ".i"): |
|
417 | if self.vfs.exists(path + ".i"): | |
418 | return True |
|
418 | return True | |
419 | # dir? |
|
419 | # dir? | |
420 | if not path.endswith("/"): |
|
420 | if not path.endswith("/"): | |
421 | path = path + "/" |
|
421 | path = path + "/" | |
422 | return self.vfs.exists(path) |
|
422 | return self.vfs.exists(path) | |
423 |
|
423 | |||
424 | class encodedstore(basicstore): |
|
424 | class encodedstore(basicstore): | |
425 | def __init__(self, path, vfstype): |
|
425 | def __init__(self, path, vfstype): | |
426 | vfs = vfstype(path + '/store') |
|
426 | vfs = vfstype(path + '/store') | |
427 | self.path = vfs.base |
|
427 | self.path = vfs.base | |
428 | self.createmode = _calcmode(vfs) |
|
428 | self.createmode = _calcmode(vfs) | |
429 | vfs.createmode = self.createmode |
|
429 | vfs.createmode = self.createmode | |
430 | self.rawvfs = vfs |
|
430 | self.rawvfs = vfs | |
431 | self.vfs = vfsmod.filtervfs(vfs, encodefilename) |
|
431 | self.vfs = vfsmod.filtervfs(vfs, encodefilename) | |
432 | self.opener = self.vfs |
|
432 | self.opener = self.vfs | |
433 |
|
433 | |||
434 | def datafiles(self, matcher=None): |
|
434 | def datafiles(self, matcher=None): | |
435 | for a, b, size in super(encodedstore, self).datafiles(): |
|
435 | for a, b, size in super(encodedstore, self).datafiles(): | |
436 | try: |
|
436 | try: | |
437 | a = decodefilename(a) |
|
437 | a = decodefilename(a) | |
438 | except KeyError: |
|
438 | except KeyError: | |
439 | a = None |
|
439 | a = None | |
440 | if a is not None and not _matchtrackedpath(a, matcher): |
|
440 | if a is not None and not _matchtrackedpath(a, matcher): | |
441 | continue |
|
441 | continue | |
442 | yield a, b, size |
|
442 | yield a, b, size | |
443 |
|
443 | |||
444 | def join(self, f): |
|
444 | def join(self, f): | |
445 | return self.path + '/' + encodefilename(f) |
|
445 | return self.path + '/' + encodefilename(f) | |
446 |
|
446 | |||
447 | def copylist(self): |
|
447 | def copylist(self): | |
448 | return (['requires', '00changelog.i'] + |
|
448 | return (['requires', '00changelog.i'] + | |
449 | ['store/' + f for f in _data.split()]) |
|
449 | ['store/' + f for f in _data.split()]) | |
450 |
|
450 | |||
451 | class fncache(object): |
|
451 | class fncache(object): | |
452 | # the filename used to be partially encoded |
|
452 | # the filename used to be partially encoded | |
453 | # hence the encodedir/decodedir dance |
|
453 | # hence the encodedir/decodedir dance | |
454 | def __init__(self, vfs): |
|
454 | def __init__(self, vfs): | |
455 | self.vfs = vfs |
|
455 | self.vfs = vfs | |
456 | self.entries = None |
|
456 | self.entries = None | |
457 | self._dirty = False |
|
457 | self._dirty = False | |
458 | # set of new additions to fncache |
|
458 | # set of new additions to fncache | |
459 | self.addls = set() |
|
459 | self.addls = set() | |
460 |
|
460 | |||
461 |
def |
|
461 | def ensureloaded(self, warn=None): | |
|
462 | '''read the fncache file if not already read. | |||
|
463 | ||||
|
464 | If the file on disk is corrupted, raise. If warn is provided, | |||
|
465 | warn and keep going instead.''' | |||
|
466 | if self.entries is None: | |||
|
467 | self._load(warn) | |||
|
468 | ||||
|
469 | def _load(self, warn=None): | |||
462 | '''fill the entries from the fncache file''' |
|
470 | '''fill the entries from the fncache file''' | |
463 | self._dirty = False |
|
471 | self._dirty = False | |
464 | try: |
|
472 | try: | |
465 | fp = self.vfs('fncache', mode='rb') |
|
473 | fp = self.vfs('fncache', mode='rb') | |
466 | except IOError: |
|
474 | except IOError: | |
467 | # skip nonexistent file |
|
475 | # skip nonexistent file | |
468 | self.entries = set() |
|
476 | self.entries = set() | |
469 | return |
|
477 | return | |
470 |
|
478 | |||
471 | self.entries = set() |
|
479 | self.entries = set() | |
472 | chunk = b'' |
|
480 | chunk = b'' | |
473 | for c in iter(functools.partial(fp.read, fncache_chunksize), b''): |
|
481 | for c in iter(functools.partial(fp.read, fncache_chunksize), b''): | |
474 | chunk += c |
|
482 | chunk += c | |
475 | try: |
|
483 | try: | |
476 | p = chunk.rindex(b'\n') |
|
484 | p = chunk.rindex(b'\n') | |
477 | self.entries.update(decodedir(chunk[:p + 1]).splitlines()) |
|
485 | self.entries.update(decodedir(chunk[:p + 1]).splitlines()) | |
478 | chunk = chunk[p + 1:] |
|
486 | chunk = chunk[p + 1:] | |
479 | except ValueError: |
|
487 | except ValueError: | |
480 | # substring '\n' not found, maybe the entry is bigger than the |
|
488 | # substring '\n' not found, maybe the entry is bigger than the | |
481 | # chunksize, so let's keep iterating |
|
489 | # chunksize, so let's keep iterating | |
482 | pass |
|
490 | pass | |
483 |
|
491 | |||
484 | if chunk: |
|
492 | if chunk: | |
485 |
|
|
493 | msg = _("fncache does not ends with a newline") | |
486 | hint=_("use 'hg debugrebuildfncache' to rebuild" |
|
494 | if warn: | |
487 | " the fncache")) |
|
495 | warn(msg + '\n') | |
488 | self._checkentries(fp) |
|
496 | else: | |
|
497 | raise error.Abort(msg, | |||
|
498 | hint=_("use 'hg debugrebuildfncache' to " | |||
|
499 | "rebuild the fncache")) | |||
|
500 | self._checkentries(fp, warn) | |||
489 | fp.close() |
|
501 | fp.close() | |
490 |
|
502 | |||
491 | def _checkentries(self, fp): |
|
503 | def _checkentries(self, fp, warn): | |
492 | """ make sure there is no empty string in entries """ |
|
504 | """ make sure there is no empty string in entries """ | |
493 | if '' in self.entries: |
|
505 | if '' in self.entries: | |
494 | fp.seek(0) |
|
506 | fp.seek(0) | |
495 | for n, line in enumerate(util.iterfile(fp)): |
|
507 | for n, line in enumerate(util.iterfile(fp)): | |
496 | if not line.rstrip('\n'): |
|
508 | if not line.rstrip('\n'): | |
497 | t = _('invalid entry in fncache, line %d') % (n + 1) |
|
509 | t = _('invalid entry in fncache, line %d') % (n + 1) | |
498 |
|
|
510 | if warn: | |
|
511 | warn(t + '\n') | |||
|
512 | else: | |||
|
513 | raise error.Abort(t) | |||
499 |
|
514 | |||
500 | def write(self, tr): |
|
515 | def write(self, tr): | |
501 | if self._dirty: |
|
516 | if self._dirty: | |
502 | assert self.entries is not None |
|
517 | assert self.entries is not None | |
503 | self.entries = self.entries | self.addls |
|
518 | self.entries = self.entries | self.addls | |
504 | self.addls = set() |
|
519 | self.addls = set() | |
505 | tr.addbackup('fncache') |
|
520 | tr.addbackup('fncache') | |
506 | fp = self.vfs('fncache', mode='wb', atomictemp=True) |
|
521 | fp = self.vfs('fncache', mode='wb', atomictemp=True) | |
507 | if self.entries: |
|
522 | if self.entries: | |
508 | fp.write(encodedir('\n'.join(self.entries) + '\n')) |
|
523 | fp.write(encodedir('\n'.join(self.entries) + '\n')) | |
509 | fp.close() |
|
524 | fp.close() | |
510 | self._dirty = False |
|
525 | self._dirty = False | |
511 | if self.addls: |
|
526 | if self.addls: | |
512 | # if we have just new entries, let's append them to the fncache |
|
527 | # if we have just new entries, let's append them to the fncache | |
513 | tr.addbackup('fncache') |
|
528 | tr.addbackup('fncache') | |
514 | fp = self.vfs('fncache', mode='ab', atomictemp=True) |
|
529 | fp = self.vfs('fncache', mode='ab', atomictemp=True) | |
515 | if self.addls: |
|
530 | if self.addls: | |
516 | fp.write(encodedir('\n'.join(self.addls) + '\n')) |
|
531 | fp.write(encodedir('\n'.join(self.addls) + '\n')) | |
517 | fp.close() |
|
532 | fp.close() | |
518 | self.entries = None |
|
533 | self.entries = None | |
519 | self.addls = set() |
|
534 | self.addls = set() | |
520 |
|
535 | |||
521 | def add(self, fn): |
|
536 | def add(self, fn): | |
522 | if self.entries is None: |
|
537 | if self.entries is None: | |
523 | self._load() |
|
538 | self._load() | |
524 | if fn not in self.entries: |
|
539 | if fn not in self.entries: | |
525 | self.addls.add(fn) |
|
540 | self.addls.add(fn) | |
526 |
|
541 | |||
527 | def remove(self, fn): |
|
542 | def remove(self, fn): | |
528 | if self.entries is None: |
|
543 | if self.entries is None: | |
529 | self._load() |
|
544 | self._load() | |
530 | if fn in self.addls: |
|
545 | if fn in self.addls: | |
531 | self.addls.remove(fn) |
|
546 | self.addls.remove(fn) | |
532 | return |
|
547 | return | |
533 | try: |
|
548 | try: | |
534 | self.entries.remove(fn) |
|
549 | self.entries.remove(fn) | |
535 | self._dirty = True |
|
550 | self._dirty = True | |
536 | except KeyError: |
|
551 | except KeyError: | |
537 | pass |
|
552 | pass | |
538 |
|
553 | |||
539 | def __contains__(self, fn): |
|
554 | def __contains__(self, fn): | |
540 | if fn in self.addls: |
|
555 | if fn in self.addls: | |
541 | return True |
|
556 | return True | |
542 | if self.entries is None: |
|
557 | if self.entries is None: | |
543 | self._load() |
|
558 | self._load() | |
544 | return fn in self.entries |
|
559 | return fn in self.entries | |
545 |
|
560 | |||
546 | def __iter__(self): |
|
561 | def __iter__(self): | |
547 | if self.entries is None: |
|
562 | if self.entries is None: | |
548 | self._load() |
|
563 | self._load() | |
549 | return iter(self.entries | self.addls) |
|
564 | return iter(self.entries | self.addls) | |
550 |
|
565 | |||
551 | class _fncachevfs(vfsmod.proxyvfs): |
|
566 | class _fncachevfs(vfsmod.proxyvfs): | |
552 | def __init__(self, vfs, fnc, encode): |
|
567 | def __init__(self, vfs, fnc, encode): | |
553 | vfsmod.proxyvfs.__init__(self, vfs) |
|
568 | vfsmod.proxyvfs.__init__(self, vfs) | |
554 | self.fncache = fnc |
|
569 | self.fncache = fnc | |
555 | self.encode = encode |
|
570 | self.encode = encode | |
556 |
|
571 | |||
557 | def __call__(self, path, mode='r', *args, **kw): |
|
572 | def __call__(self, path, mode='r', *args, **kw): | |
558 | encoded = self.encode(path) |
|
573 | encoded = self.encode(path) | |
559 | if mode not in ('r', 'rb') and (path.startswith('data/') or |
|
574 | if mode not in ('r', 'rb') and (path.startswith('data/') or | |
560 | path.startswith('meta/')): |
|
575 | path.startswith('meta/')): | |
561 | # do not trigger a fncache load when adding a file that already is |
|
576 | # do not trigger a fncache load when adding a file that already is | |
562 | # known to exist. |
|
577 | # known to exist. | |
563 | notload = self.fncache.entries is None and self.vfs.exists(encoded) |
|
578 | notload = self.fncache.entries is None and self.vfs.exists(encoded) | |
564 | if notload and 'a' in mode and not self.vfs.stat(encoded).st_size: |
|
579 | if notload and 'a' in mode and not self.vfs.stat(encoded).st_size: | |
565 | # when appending to an existing file, if the file has size zero, |
|
580 | # when appending to an existing file, if the file has size zero, | |
566 | # it should be considered as missing. Such zero-size files are |
|
581 | # it should be considered as missing. Such zero-size files are | |
567 | # the result of truncation when a transaction is aborted. |
|
582 | # the result of truncation when a transaction is aborted. | |
568 | notload = False |
|
583 | notload = False | |
569 | if not notload: |
|
584 | if not notload: | |
570 | self.fncache.add(path) |
|
585 | self.fncache.add(path) | |
571 | return self.vfs(encoded, mode, *args, **kw) |
|
586 | return self.vfs(encoded, mode, *args, **kw) | |
572 |
|
587 | |||
573 | def join(self, path): |
|
588 | def join(self, path): | |
574 | if path: |
|
589 | if path: | |
575 | return self.vfs.join(self.encode(path)) |
|
590 | return self.vfs.join(self.encode(path)) | |
576 | else: |
|
591 | else: | |
577 | return self.vfs.join(path) |
|
592 | return self.vfs.join(path) | |
578 |
|
593 | |||
579 | class fncachestore(basicstore): |
|
594 | class fncachestore(basicstore): | |
580 | def __init__(self, path, vfstype, dotencode): |
|
595 | def __init__(self, path, vfstype, dotencode): | |
581 | if dotencode: |
|
596 | if dotencode: | |
582 | encode = _pathencode |
|
597 | encode = _pathencode | |
583 | else: |
|
598 | else: | |
584 | encode = _plainhybridencode |
|
599 | encode = _plainhybridencode | |
585 | self.encode = encode |
|
600 | self.encode = encode | |
586 | vfs = vfstype(path + '/store') |
|
601 | vfs = vfstype(path + '/store') | |
587 | self.path = vfs.base |
|
602 | self.path = vfs.base | |
588 | self.pathsep = self.path + '/' |
|
603 | self.pathsep = self.path + '/' | |
589 | self.createmode = _calcmode(vfs) |
|
604 | self.createmode = _calcmode(vfs) | |
590 | vfs.createmode = self.createmode |
|
605 | vfs.createmode = self.createmode | |
591 | self.rawvfs = vfs |
|
606 | self.rawvfs = vfs | |
592 | fnc = fncache(vfs) |
|
607 | fnc = fncache(vfs) | |
593 | self.fncache = fnc |
|
608 | self.fncache = fnc | |
594 | self.vfs = _fncachevfs(vfs, fnc, encode) |
|
609 | self.vfs = _fncachevfs(vfs, fnc, encode) | |
595 | self.opener = self.vfs |
|
610 | self.opener = self.vfs | |
596 |
|
611 | |||
597 | def join(self, f): |
|
612 | def join(self, f): | |
598 | return self.pathsep + self.encode(f) |
|
613 | return self.pathsep + self.encode(f) | |
599 |
|
614 | |||
600 | def getsize(self, path): |
|
615 | def getsize(self, path): | |
601 | return self.rawvfs.stat(path).st_size |
|
616 | return self.rawvfs.stat(path).st_size | |
602 |
|
617 | |||
603 | def datafiles(self, matcher=None): |
|
618 | def datafiles(self, matcher=None): | |
604 | for f in sorted(self.fncache): |
|
619 | for f in sorted(self.fncache): | |
605 | if not _matchtrackedpath(f, matcher): |
|
620 | if not _matchtrackedpath(f, matcher): | |
606 | continue |
|
621 | continue | |
607 | ef = self.encode(f) |
|
622 | ef = self.encode(f) | |
608 | try: |
|
623 | try: | |
609 | yield f, ef, self.getsize(ef) |
|
624 | yield f, ef, self.getsize(ef) | |
610 | except OSError as err: |
|
625 | except OSError as err: | |
611 | if err.errno != errno.ENOENT: |
|
626 | if err.errno != errno.ENOENT: | |
612 | raise |
|
627 | raise | |
613 |
|
628 | |||
614 | def copylist(self): |
|
629 | def copylist(self): | |
615 | d = ('bookmarks narrowspec data meta dh fncache phaseroots obsstore' |
|
630 | d = ('bookmarks narrowspec data meta dh fncache phaseroots obsstore' | |
616 | ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') |
|
631 | ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') | |
617 | return (['requires', '00changelog.i'] + |
|
632 | return (['requires', '00changelog.i'] + | |
618 | ['store/' + f for f in d.split()]) |
|
633 | ['store/' + f for f in d.split()]) | |
619 |
|
634 | |||
620 | def write(self, tr): |
|
635 | def write(self, tr): | |
621 | self.fncache.write(tr) |
|
636 | self.fncache.write(tr) | |
622 |
|
637 | |||
623 | def invalidatecaches(self): |
|
638 | def invalidatecaches(self): | |
624 | self.fncache.entries = None |
|
639 | self.fncache.entries = None | |
625 | self.fncache.addls = set() |
|
640 | self.fncache.addls = set() | |
626 |
|
641 | |||
627 | def markremoved(self, fn): |
|
642 | def markremoved(self, fn): | |
628 | self.fncache.remove(fn) |
|
643 | self.fncache.remove(fn) | |
629 |
|
644 | |||
630 | def _exists(self, f): |
|
645 | def _exists(self, f): | |
631 | ef = self.encode(f) |
|
646 | ef = self.encode(f) | |
632 | try: |
|
647 | try: | |
633 | self.getsize(ef) |
|
648 | self.getsize(ef) | |
634 | return True |
|
649 | return True | |
635 | except OSError as err: |
|
650 | except OSError as err: | |
636 | if err.errno != errno.ENOENT: |
|
651 | if err.errno != errno.ENOENT: | |
637 | raise |
|
652 | raise | |
638 | # nonexistent entry |
|
653 | # nonexistent entry | |
639 | return False |
|
654 | return False | |
640 |
|
655 | |||
641 | def __contains__(self, path): |
|
656 | def __contains__(self, path): | |
642 | '''Checks if the store contains path''' |
|
657 | '''Checks if the store contains path''' | |
643 | path = "/".join(("data", path)) |
|
658 | path = "/".join(("data", path)) | |
644 | # check for files (exact match) |
|
659 | # check for files (exact match) | |
645 | e = path + '.i' |
|
660 | e = path + '.i' | |
646 | if e in self.fncache and self._exists(e): |
|
661 | if e in self.fncache and self._exists(e): | |
647 | return True |
|
662 | return True | |
648 | # now check for directories (prefix match) |
|
663 | # now check for directories (prefix match) | |
649 | if not path.endswith('/'): |
|
664 | if not path.endswith('/'): | |
650 | path += '/' |
|
665 | path += '/' | |
651 | for e in self.fncache: |
|
666 | for e in self.fncache: | |
652 | if e.startswith(path) and self._exists(e): |
|
667 | if e.startswith(path) and self._exists(e): | |
653 | return True |
|
668 | return True | |
654 | return False |
|
669 | return False |
@@ -1,532 +1,545 b'' | |||||
1 | #require repofncache |
|
1 | #require repofncache | |
2 |
|
2 | |||
3 | An extension which will set fncache chunksize to 1 byte to make sure that logic |
|
3 | An extension which will set fncache chunksize to 1 byte to make sure that logic | |
4 | does not break |
|
4 | does not break | |
5 |
|
5 | |||
6 | $ cat > chunksize.py <<EOF |
|
6 | $ cat > chunksize.py <<EOF | |
7 | > from __future__ import absolute_import |
|
7 | > from __future__ import absolute_import | |
8 | > from mercurial import store |
|
8 | > from mercurial import store | |
9 | > store.fncache_chunksize = 1 |
|
9 | > store.fncache_chunksize = 1 | |
10 | > EOF |
|
10 | > EOF | |
11 |
|
11 | |||
12 | $ cat >> $HGRCPATH <<EOF |
|
12 | $ cat >> $HGRCPATH <<EOF | |
13 | > [extensions] |
|
13 | > [extensions] | |
14 | > chunksize = $TESTTMP/chunksize.py |
|
14 | > chunksize = $TESTTMP/chunksize.py | |
15 | > EOF |
|
15 | > EOF | |
16 |
|
16 | |||
17 | Init repo1: |
|
17 | Init repo1: | |
18 |
|
18 | |||
19 | $ hg init repo1 |
|
19 | $ hg init repo1 | |
20 | $ cd repo1 |
|
20 | $ cd repo1 | |
21 | $ echo "some text" > a |
|
21 | $ echo "some text" > a | |
22 | $ hg add |
|
22 | $ hg add | |
23 | adding a |
|
23 | adding a | |
24 | $ hg ci -m first |
|
24 | $ hg ci -m first | |
25 | $ cat .hg/store/fncache | sort |
|
25 | $ cat .hg/store/fncache | sort | |
26 | data/a.i |
|
26 | data/a.i | |
27 |
|
27 | |||
28 | Testing a.i/b: |
|
28 | Testing a.i/b: | |
29 |
|
29 | |||
30 | $ mkdir a.i |
|
30 | $ mkdir a.i | |
31 | $ echo "some other text" > a.i/b |
|
31 | $ echo "some other text" > a.i/b | |
32 | $ hg add |
|
32 | $ hg add | |
33 | adding a.i/b |
|
33 | adding a.i/b | |
34 | $ hg ci -m second |
|
34 | $ hg ci -m second | |
35 | $ cat .hg/store/fncache | sort |
|
35 | $ cat .hg/store/fncache | sort | |
36 | data/a.i |
|
36 | data/a.i | |
37 | data/a.i.hg/b.i |
|
37 | data/a.i.hg/b.i | |
38 |
|
38 | |||
39 | Testing a.i.hg/c: |
|
39 | Testing a.i.hg/c: | |
40 |
|
40 | |||
41 | $ mkdir a.i.hg |
|
41 | $ mkdir a.i.hg | |
42 | $ echo "yet another text" > a.i.hg/c |
|
42 | $ echo "yet another text" > a.i.hg/c | |
43 | $ hg add |
|
43 | $ hg add | |
44 | adding a.i.hg/c |
|
44 | adding a.i.hg/c | |
45 | $ hg ci -m third |
|
45 | $ hg ci -m third | |
46 | $ cat .hg/store/fncache | sort |
|
46 | $ cat .hg/store/fncache | sort | |
47 | data/a.i |
|
47 | data/a.i | |
48 | data/a.i.hg.hg/c.i |
|
48 | data/a.i.hg.hg/c.i | |
49 | data/a.i.hg/b.i |
|
49 | data/a.i.hg/b.i | |
50 |
|
50 | |||
51 | Testing verify: |
|
51 | Testing verify: | |
52 |
|
52 | |||
53 | $ hg verify |
|
53 | $ hg verify | |
54 | checking changesets |
|
54 | checking changesets | |
55 | checking manifests |
|
55 | checking manifests | |
56 | crosschecking files in changesets and manifests |
|
56 | crosschecking files in changesets and manifests | |
57 | checking files |
|
57 | checking files | |
58 | checked 3 changesets with 3 changes to 3 files |
|
58 | checked 3 changesets with 3 changes to 3 files | |
59 |
|
59 | |||
60 | $ rm .hg/store/fncache |
|
60 | $ rm .hg/store/fncache | |
61 |
|
61 | |||
62 | $ hg verify |
|
62 | $ hg verify | |
63 | checking changesets |
|
63 | checking changesets | |
64 | checking manifests |
|
64 | checking manifests | |
65 | crosschecking files in changesets and manifests |
|
65 | crosschecking files in changesets and manifests | |
66 | checking files |
|
66 | checking files | |
67 | warning: revlog 'data/a.i' not in fncache! |
|
67 | warning: revlog 'data/a.i' not in fncache! | |
68 | warning: revlog 'data/a.i.hg/c.i' not in fncache! |
|
68 | warning: revlog 'data/a.i.hg/c.i' not in fncache! | |
69 | warning: revlog 'data/a.i/b.i' not in fncache! |
|
69 | warning: revlog 'data/a.i/b.i' not in fncache! | |
70 | checked 3 changesets with 3 changes to 3 files |
|
70 | checked 3 changesets with 3 changes to 3 files | |
71 | 3 warnings encountered! |
|
71 | 3 warnings encountered! | |
72 | hint: run "hg debugrebuildfncache" to recover from corrupt fncache |
|
72 | hint: run "hg debugrebuildfncache" to recover from corrupt fncache | |
73 |
|
73 | |||
74 | Follow the hint to make sure it works |
|
74 | Follow the hint to make sure it works | |
75 |
|
75 | |||
76 | $ hg debugrebuildfncache |
|
76 | $ hg debugrebuildfncache | |
77 | adding data/a.i |
|
77 | adding data/a.i | |
78 | adding data/a.i.hg/c.i |
|
78 | adding data/a.i.hg/c.i | |
79 | adding data/a.i/b.i |
|
79 | adding data/a.i/b.i | |
80 | 3 items added, 0 removed from fncache |
|
80 | 3 items added, 0 removed from fncache | |
81 |
|
81 | |||
82 | $ hg verify |
|
82 | $ hg verify | |
83 | checking changesets |
|
83 | checking changesets | |
84 | checking manifests |
|
84 | checking manifests | |
85 | crosschecking files in changesets and manifests |
|
85 | crosschecking files in changesets and manifests | |
86 | checking files |
|
86 | checking files | |
87 | checked 3 changesets with 3 changes to 3 files |
|
87 | checked 3 changesets with 3 changes to 3 files | |
88 |
|
88 | |||
89 | $ cd .. |
|
89 | $ cd .. | |
90 |
|
90 | |||
91 | Non store repo: |
|
91 | Non store repo: | |
92 |
|
92 | |||
93 | $ hg --config format.usestore=False init foo |
|
93 | $ hg --config format.usestore=False init foo | |
94 | $ cd foo |
|
94 | $ cd foo | |
95 | $ mkdir tst.d |
|
95 | $ mkdir tst.d | |
96 | $ echo foo > tst.d/foo |
|
96 | $ echo foo > tst.d/foo | |
97 | $ hg ci -Amfoo |
|
97 | $ hg ci -Amfoo | |
98 | adding tst.d/foo |
|
98 | adding tst.d/foo | |
99 | $ find .hg | sort |
|
99 | $ find .hg | sort | |
100 | .hg |
|
100 | .hg | |
101 | .hg/00changelog.i |
|
101 | .hg/00changelog.i | |
102 | .hg/00manifest.i |
|
102 | .hg/00manifest.i | |
103 | .hg/cache |
|
103 | .hg/cache | |
104 | .hg/cache/branch2-served |
|
104 | .hg/cache/branch2-served | |
105 | .hg/cache/rbc-names-v1 |
|
105 | .hg/cache/rbc-names-v1 | |
106 | .hg/cache/rbc-revs-v1 |
|
106 | .hg/cache/rbc-revs-v1 | |
107 | .hg/data |
|
107 | .hg/data | |
108 | .hg/data/tst.d.hg |
|
108 | .hg/data/tst.d.hg | |
109 | .hg/data/tst.d.hg/foo.i |
|
109 | .hg/data/tst.d.hg/foo.i | |
110 | .hg/dirstate |
|
110 | .hg/dirstate | |
111 | .hg/fsmonitor.state (fsmonitor !) |
|
111 | .hg/fsmonitor.state (fsmonitor !) | |
112 | .hg/last-message.txt |
|
112 | .hg/last-message.txt | |
113 | .hg/phaseroots |
|
113 | .hg/phaseroots | |
114 | .hg/requires |
|
114 | .hg/requires | |
115 | .hg/undo |
|
115 | .hg/undo | |
116 | .hg/undo.backup.dirstate |
|
116 | .hg/undo.backup.dirstate | |
117 | .hg/undo.backupfiles |
|
117 | .hg/undo.backupfiles | |
118 | .hg/undo.bookmarks |
|
118 | .hg/undo.bookmarks | |
119 | .hg/undo.branch |
|
119 | .hg/undo.branch | |
120 | .hg/undo.desc |
|
120 | .hg/undo.desc | |
121 | .hg/undo.dirstate |
|
121 | .hg/undo.dirstate | |
122 | .hg/undo.phaseroots |
|
122 | .hg/undo.phaseroots | |
123 | .hg/wcache |
|
123 | .hg/wcache | |
124 | .hg/wcache/checkisexec (execbit !) |
|
124 | .hg/wcache/checkisexec (execbit !) | |
125 | .hg/wcache/checklink (symlink !) |
|
125 | .hg/wcache/checklink (symlink !) | |
126 | .hg/wcache/checklink-target (symlink !) |
|
126 | .hg/wcache/checklink-target (symlink !) | |
127 | .hg/wcache/manifestfulltextcache (reporevlogstore !) |
|
127 | .hg/wcache/manifestfulltextcache (reporevlogstore !) | |
128 | $ cd .. |
|
128 | $ cd .. | |
129 |
|
129 | |||
130 | Non fncache repo: |
|
130 | Non fncache repo: | |
131 |
|
131 | |||
132 | $ hg --config format.usefncache=False init bar |
|
132 | $ hg --config format.usefncache=False init bar | |
133 | $ cd bar |
|
133 | $ cd bar | |
134 | $ mkdir tst.d |
|
134 | $ mkdir tst.d | |
135 | $ echo foo > tst.d/Foo |
|
135 | $ echo foo > tst.d/Foo | |
136 | $ hg ci -Amfoo |
|
136 | $ hg ci -Amfoo | |
137 | adding tst.d/Foo |
|
137 | adding tst.d/Foo | |
138 | $ find .hg | sort |
|
138 | $ find .hg | sort | |
139 | .hg |
|
139 | .hg | |
140 | .hg/00changelog.i |
|
140 | .hg/00changelog.i | |
141 | .hg/cache |
|
141 | .hg/cache | |
142 | .hg/cache/branch2-served |
|
142 | .hg/cache/branch2-served | |
143 | .hg/cache/rbc-names-v1 |
|
143 | .hg/cache/rbc-names-v1 | |
144 | .hg/cache/rbc-revs-v1 |
|
144 | .hg/cache/rbc-revs-v1 | |
145 | .hg/dirstate |
|
145 | .hg/dirstate | |
146 | .hg/fsmonitor.state (fsmonitor !) |
|
146 | .hg/fsmonitor.state (fsmonitor !) | |
147 | .hg/last-message.txt |
|
147 | .hg/last-message.txt | |
148 | .hg/requires |
|
148 | .hg/requires | |
149 | .hg/store |
|
149 | .hg/store | |
150 | .hg/store/00changelog.i |
|
150 | .hg/store/00changelog.i | |
151 | .hg/store/00manifest.i |
|
151 | .hg/store/00manifest.i | |
152 | .hg/store/data |
|
152 | .hg/store/data | |
153 | .hg/store/data/tst.d.hg |
|
153 | .hg/store/data/tst.d.hg | |
154 | .hg/store/data/tst.d.hg/_foo.i |
|
154 | .hg/store/data/tst.d.hg/_foo.i | |
155 | .hg/store/phaseroots |
|
155 | .hg/store/phaseroots | |
156 | .hg/store/undo |
|
156 | .hg/store/undo | |
157 | .hg/store/undo.backupfiles |
|
157 | .hg/store/undo.backupfiles | |
158 | .hg/store/undo.phaseroots |
|
158 | .hg/store/undo.phaseroots | |
159 | .hg/undo.backup.dirstate |
|
159 | .hg/undo.backup.dirstate | |
160 | .hg/undo.bookmarks |
|
160 | .hg/undo.bookmarks | |
161 | .hg/undo.branch |
|
161 | .hg/undo.branch | |
162 | .hg/undo.desc |
|
162 | .hg/undo.desc | |
163 | .hg/undo.dirstate |
|
163 | .hg/undo.dirstate | |
164 | .hg/wcache |
|
164 | .hg/wcache | |
165 | .hg/wcache/checkisexec (execbit !) |
|
165 | .hg/wcache/checkisexec (execbit !) | |
166 | .hg/wcache/checklink (symlink !) |
|
166 | .hg/wcache/checklink (symlink !) | |
167 | .hg/wcache/checklink-target (symlink !) |
|
167 | .hg/wcache/checklink-target (symlink !) | |
168 | .hg/wcache/manifestfulltextcache (reporevlogstore !) |
|
168 | .hg/wcache/manifestfulltextcache (reporevlogstore !) | |
169 | $ cd .. |
|
169 | $ cd .. | |
170 |
|
170 | |||
171 | Encoding of reserved / long paths in the store |
|
171 | Encoding of reserved / long paths in the store | |
172 |
|
172 | |||
173 | $ hg init r2 |
|
173 | $ hg init r2 | |
174 | $ cd r2 |
|
174 | $ cd r2 | |
175 | $ cat <<EOF > .hg/hgrc |
|
175 | $ cat <<EOF > .hg/hgrc | |
176 | > [ui] |
|
176 | > [ui] | |
177 | > portablefilenames = ignore |
|
177 | > portablefilenames = ignore | |
178 | > EOF |
|
178 | > EOF | |
179 |
|
179 | |||
180 | $ hg import -q --bypass - <<EOF |
|
180 | $ hg import -q --bypass - <<EOF | |
181 | > # HG changeset patch |
|
181 | > # HG changeset patch | |
182 | > # User test |
|
182 | > # User test | |
183 | > # Date 0 0 |
|
183 | > # Date 0 0 | |
184 | > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7 |
|
184 | > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7 | |
185 | > # Parent 0000000000000000000000000000000000000000 |
|
185 | > # Parent 0000000000000000000000000000000000000000 | |
186 | > 1 |
|
186 | > 1 | |
187 | > |
|
187 | > | |
188 | > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz |
|
188 | > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz | |
189 | > new file mode 100644 |
|
189 | > new file mode 100644 | |
190 | > --- /dev/null |
|
190 | > --- /dev/null | |
191 | > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz |
|
191 | > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz | |
192 | > @@ -0,0 +1,1 @@ |
|
192 | > @@ -0,0 +1,1 @@ | |
193 | > +foo |
|
193 | > +foo | |
194 | > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT |
|
194 | > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT | |
195 | > new file mode 100644 |
|
195 | > new file mode 100644 | |
196 | > --- /dev/null |
|
196 | > --- /dev/null | |
197 | > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT |
|
197 | > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT | |
198 | > @@ -0,0 +1,1 @@ |
|
198 | > @@ -0,0 +1,1 @@ | |
199 | > +foo |
|
199 | > +foo | |
200 | > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt |
|
200 | > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt | |
201 | > new file mode 100644 |
|
201 | > new file mode 100644 | |
202 | > --- /dev/null |
|
202 | > --- /dev/null | |
203 | > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt |
|
203 | > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt | |
204 | > @@ -0,0 +1,1 @@ |
|
204 | > @@ -0,0 +1,1 @@ | |
205 | > +foo |
|
205 | > +foo | |
206 | > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c |
|
206 | > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c | |
207 | > new file mode 100644 |
|
207 | > new file mode 100644 | |
208 | > --- /dev/null |
|
208 | > --- /dev/null | |
209 | > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c |
|
209 | > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c | |
210 | > @@ -0,0 +1,1 @@ |
|
210 | > @@ -0,0 +1,1 @@ | |
211 | > +foo |
|
211 | > +foo | |
212 | > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider |
|
212 | > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider | |
213 | > new file mode 100644 |
|
213 | > new file mode 100644 | |
214 | > --- /dev/null |
|
214 | > --- /dev/null | |
215 | > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider |
|
215 | > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider | |
216 | > @@ -0,0 +1,1 @@ |
|
216 | > @@ -0,0 +1,1 @@ | |
217 | > +foo |
|
217 | > +foo | |
218 | > EOF |
|
218 | > EOF | |
219 |
|
219 | |||
220 | $ find .hg/store -name *.i | sort |
|
220 | $ find .hg/store -name *.i | sort | |
221 | .hg/store/00changelog.i |
|
221 | .hg/store/00changelog.i | |
222 | .hg/store/00manifest.i |
|
222 | .hg/store/00manifest.i | |
223 | .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i |
|
223 | .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i | |
224 | .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i |
|
224 | .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i | |
225 | .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i |
|
225 | .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i | |
226 | .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i |
|
226 | .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i | |
227 | .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i |
|
227 | .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i | |
228 |
|
228 | |||
229 | $ cd .. |
|
229 | $ cd .. | |
230 |
|
230 | |||
231 | Aborting lock does not prevent fncache writes |
|
231 | Aborting lock does not prevent fncache writes | |
232 |
|
232 | |||
233 | $ cat > exceptionext.py <<EOF |
|
233 | $ cat > exceptionext.py <<EOF | |
234 | > from __future__ import absolute_import |
|
234 | > from __future__ import absolute_import | |
235 | > import os |
|
235 | > import os | |
236 | > from mercurial import commands, error, extensions |
|
236 | > from mercurial import commands, error, extensions | |
237 | > |
|
237 | > | |
238 | > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs): |
|
238 | > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs): | |
239 | > def releasewrap(): |
|
239 | > def releasewrap(): | |
240 | > l.held = False # ensure __del__ is a noop |
|
240 | > l.held = False # ensure __del__ is a noop | |
241 | > raise error.Abort("forced lock failure") |
|
241 | > raise error.Abort("forced lock failure") | |
242 | > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs) |
|
242 | > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs) | |
243 | > return l |
|
243 | > return l | |
244 | > |
|
244 | > | |
245 | > def reposetup(ui, repo): |
|
245 | > def reposetup(ui, repo): | |
246 | > extensions.wrapfunction(repo, '_lock', lockexception) |
|
246 | > extensions.wrapfunction(repo, '_lock', lockexception) | |
247 | > |
|
247 | > | |
248 | > cmdtable = {} |
|
248 | > cmdtable = {} | |
249 | > |
|
249 | > | |
250 | > # wrap "commit" command to prevent wlock from being '__del__()'-ed |
|
250 | > # wrap "commit" command to prevent wlock from being '__del__()'-ed | |
251 | > # at the end of dispatching (for intentional "forced lcok failure") |
|
251 | > # at the end of dispatching (for intentional "forced lcok failure") | |
252 | > def commitwrap(orig, ui, repo, *pats, **opts): |
|
252 | > def commitwrap(orig, ui, repo, *pats, **opts): | |
253 | > repo = repo.unfiltered() # to use replaced repo._lock certainly |
|
253 | > repo = repo.unfiltered() # to use replaced repo._lock certainly | |
254 | > wlock = repo.wlock() |
|
254 | > wlock = repo.wlock() | |
255 | > try: |
|
255 | > try: | |
256 | > return orig(ui, repo, *pats, **opts) |
|
256 | > return orig(ui, repo, *pats, **opts) | |
257 | > finally: |
|
257 | > finally: | |
258 | > # multiple 'relase()' is needed for complete releasing wlock, |
|
258 | > # multiple 'relase()' is needed for complete releasing wlock, | |
259 | > # because "forced" abort at last releasing store lock |
|
259 | > # because "forced" abort at last releasing store lock | |
260 | > # prevents wlock from being released at same 'lockmod.release()' |
|
260 | > # prevents wlock from being released at same 'lockmod.release()' | |
261 | > for i in range(wlock.held): |
|
261 | > for i in range(wlock.held): | |
262 | > wlock.release() |
|
262 | > wlock.release() | |
263 | > |
|
263 | > | |
264 | > def extsetup(ui): |
|
264 | > def extsetup(ui): | |
265 | > extensions.wrapcommand(commands.table, b"commit", commitwrap) |
|
265 | > extensions.wrapcommand(commands.table, b"commit", commitwrap) | |
266 | > EOF |
|
266 | > EOF | |
267 | $ extpath=`pwd`/exceptionext.py |
|
267 | $ extpath=`pwd`/exceptionext.py | |
268 | $ hg init fncachetxn |
|
268 | $ hg init fncachetxn | |
269 | $ cd fncachetxn |
|
269 | $ cd fncachetxn | |
270 | $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc |
|
270 | $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc | |
271 | $ touch y |
|
271 | $ touch y | |
272 | $ hg ci -qAm y |
|
272 | $ hg ci -qAm y | |
273 | abort: forced lock failure |
|
273 | abort: forced lock failure | |
274 | [255] |
|
274 | [255] | |
275 | $ cat .hg/store/fncache |
|
275 | $ cat .hg/store/fncache | |
276 | data/y.i |
|
276 | data/y.i | |
277 |
|
277 | |||
278 | Aborting transaction prevents fncache change |
|
278 | Aborting transaction prevents fncache change | |
279 |
|
279 | |||
280 | $ cat > ../exceptionext.py <<EOF |
|
280 | $ cat > ../exceptionext.py <<EOF | |
281 | > from __future__ import absolute_import |
|
281 | > from __future__ import absolute_import | |
282 | > import os |
|
282 | > import os | |
283 | > from mercurial import commands, error, extensions, localrepo |
|
283 | > from mercurial import commands, error, extensions, localrepo | |
284 | > |
|
284 | > | |
285 | > def wrapper(orig, self, *args, **kwargs): |
|
285 | > def wrapper(orig, self, *args, **kwargs): | |
286 | > tr = orig(self, *args, **kwargs) |
|
286 | > tr = orig(self, *args, **kwargs) | |
287 | > def fail(tr): |
|
287 | > def fail(tr): | |
288 | > raise error.Abort(b"forced transaction failure") |
|
288 | > raise error.Abort(b"forced transaction failure") | |
289 | > # zzz prefix to ensure it sorted after store.write |
|
289 | > # zzz prefix to ensure it sorted after store.write | |
290 | > tr.addfinalize(b'zzz-forcefails', fail) |
|
290 | > tr.addfinalize(b'zzz-forcefails', fail) | |
291 | > return tr |
|
291 | > return tr | |
292 | > |
|
292 | > | |
293 | > def uisetup(ui): |
|
293 | > def uisetup(ui): | |
294 | > extensions.wrapfunction( |
|
294 | > extensions.wrapfunction( | |
295 | > localrepo.localrepository, b'transaction', wrapper) |
|
295 | > localrepo.localrepository, b'transaction', wrapper) | |
296 | > |
|
296 | > | |
297 | > cmdtable = {} |
|
297 | > cmdtable = {} | |
298 | > |
|
298 | > | |
299 | > EOF |
|
299 | > EOF | |
300 |
|
300 | |||
301 | Clean cached version |
|
301 | Clean cached version | |
302 | $ rm -f "${extpath}c" |
|
302 | $ rm -f "${extpath}c" | |
303 | $ rm -Rf "`dirname $extpath`/__pycache__" |
|
303 | $ rm -Rf "`dirname $extpath`/__pycache__" | |
304 |
|
304 | |||
305 | $ touch z |
|
305 | $ touch z | |
306 | $ hg ci -qAm z |
|
306 | $ hg ci -qAm z | |
307 | transaction abort! |
|
307 | transaction abort! | |
308 | rollback completed |
|
308 | rollback completed | |
309 | abort: forced transaction failure |
|
309 | abort: forced transaction failure | |
310 | [255] |
|
310 | [255] | |
311 | $ cat .hg/store/fncache |
|
311 | $ cat .hg/store/fncache | |
312 | data/y.i |
|
312 | data/y.i | |
313 |
|
313 | |||
314 | Aborted transactions can be recovered later |
|
314 | Aborted transactions can be recovered later | |
315 |
|
315 | |||
316 | $ cat > ../exceptionext.py <<EOF |
|
316 | $ cat > ../exceptionext.py <<EOF | |
317 | > from __future__ import absolute_import |
|
317 | > from __future__ import absolute_import | |
318 | > import os |
|
318 | > import os | |
319 | > from mercurial import ( |
|
319 | > from mercurial import ( | |
320 | > commands, |
|
320 | > commands, | |
321 | > error, |
|
321 | > error, | |
322 | > extensions, |
|
322 | > extensions, | |
323 | > localrepo, |
|
323 | > localrepo, | |
324 | > transaction, |
|
324 | > transaction, | |
325 | > ) |
|
325 | > ) | |
326 | > |
|
326 | > | |
327 | > def trwrapper(orig, self, *args, **kwargs): |
|
327 | > def trwrapper(orig, self, *args, **kwargs): | |
328 | > tr = orig(self, *args, **kwargs) |
|
328 | > tr = orig(self, *args, **kwargs) | |
329 | > def fail(tr): |
|
329 | > def fail(tr): | |
330 | > raise error.Abort(b"forced transaction failure") |
|
330 | > raise error.Abort(b"forced transaction failure") | |
331 | > # zzz prefix to ensure it sorted after store.write |
|
331 | > # zzz prefix to ensure it sorted after store.write | |
332 | > tr.addfinalize(b'zzz-forcefails', fail) |
|
332 | > tr.addfinalize(b'zzz-forcefails', fail) | |
333 | > return tr |
|
333 | > return tr | |
334 | > |
|
334 | > | |
335 | > def abortwrapper(orig, self, *args, **kwargs): |
|
335 | > def abortwrapper(orig, self, *args, **kwargs): | |
336 | > raise error.Abort(b"forced transaction failure") |
|
336 | > raise error.Abort(b"forced transaction failure") | |
337 | > |
|
337 | > | |
338 | > def uisetup(ui): |
|
338 | > def uisetup(ui): | |
339 | > extensions.wrapfunction(localrepo.localrepository, 'transaction', |
|
339 | > extensions.wrapfunction(localrepo.localrepository, 'transaction', | |
340 | > trwrapper) |
|
340 | > trwrapper) | |
341 | > extensions.wrapfunction(transaction.transaction, '_abort', |
|
341 | > extensions.wrapfunction(transaction.transaction, '_abort', | |
342 | > abortwrapper) |
|
342 | > abortwrapper) | |
343 | > |
|
343 | > | |
344 | > cmdtable = {} |
|
344 | > cmdtable = {} | |
345 | > |
|
345 | > | |
346 | > EOF |
|
346 | > EOF | |
347 |
|
347 | |||
348 | Clean cached versions |
|
348 | Clean cached versions | |
349 | $ rm -f "${extpath}c" |
|
349 | $ rm -f "${extpath}c" | |
350 | $ rm -Rf "`dirname $extpath`/__pycache__" |
|
350 | $ rm -Rf "`dirname $extpath`/__pycache__" | |
351 |
|
351 | |||
352 | $ hg up -q 1 |
|
352 | $ hg up -q 1 | |
353 | $ touch z |
|
353 | $ touch z | |
354 | $ hg ci -qAm z 2>/dev/null |
|
354 | $ hg ci -qAm z 2>/dev/null | |
355 | [255] |
|
355 | [255] | |
356 | $ cat .hg/store/fncache | sort |
|
356 | $ cat .hg/store/fncache | sort | |
357 | data/y.i |
|
357 | data/y.i | |
358 | data/z.i |
|
358 | data/z.i | |
359 | $ hg recover |
|
359 | $ hg recover | |
360 | rolling back interrupted transaction |
|
360 | rolling back interrupted transaction | |
361 | checking changesets |
|
361 | checking changesets | |
362 | checking manifests |
|
362 | checking manifests | |
363 | crosschecking files in changesets and manifests |
|
363 | crosschecking files in changesets and manifests | |
364 | checking files |
|
364 | checking files | |
365 | checked 1 changesets with 1 changes to 1 files |
|
365 | checked 1 changesets with 1 changes to 1 files | |
366 | $ cat .hg/store/fncache |
|
366 | $ cat .hg/store/fncache | |
367 | data/y.i |
|
367 | data/y.i | |
368 |
|
368 | |||
369 | $ cd .. |
|
369 | $ cd .. | |
370 |
|
370 | |||
371 | debugrebuildfncache does nothing unless repo has fncache requirement |
|
371 | debugrebuildfncache does nothing unless repo has fncache requirement | |
372 |
|
372 | |||
373 | $ hg --config format.usefncache=false init nofncache |
|
373 | $ hg --config format.usefncache=false init nofncache | |
374 | $ cd nofncache |
|
374 | $ cd nofncache | |
375 | $ hg debugrebuildfncache |
|
375 | $ hg debugrebuildfncache | |
376 | (not rebuilding fncache because repository does not support fncache) |
|
376 | (not rebuilding fncache because repository does not support fncache) | |
377 |
|
377 | |||
378 | $ cd .. |
|
378 | $ cd .. | |
379 |
|
379 | |||
380 | debugrebuildfncache works on empty repository |
|
380 | debugrebuildfncache works on empty repository | |
381 |
|
381 | |||
382 | $ hg init empty |
|
382 | $ hg init empty | |
383 | $ cd empty |
|
383 | $ cd empty | |
384 | $ hg debugrebuildfncache |
|
384 | $ hg debugrebuildfncache | |
385 | fncache already up to date |
|
385 | fncache already up to date | |
386 | $ cd .. |
|
386 | $ cd .. | |
387 |
|
387 | |||
388 | debugrebuildfncache on an up to date repository no-ops |
|
388 | debugrebuildfncache on an up to date repository no-ops | |
389 |
|
389 | |||
390 | $ hg init repo |
|
390 | $ hg init repo | |
391 | $ cd repo |
|
391 | $ cd repo | |
392 | $ echo initial > foo |
|
392 | $ echo initial > foo | |
393 | $ echo initial > .bar |
|
393 | $ echo initial > .bar | |
394 | $ hg commit -A -m initial |
|
394 | $ hg commit -A -m initial | |
395 | adding .bar |
|
395 | adding .bar | |
396 | adding foo |
|
396 | adding foo | |
397 |
|
397 | |||
398 | $ cat .hg/store/fncache | sort |
|
398 | $ cat .hg/store/fncache | sort | |
399 | data/.bar.i |
|
399 | data/.bar.i | |
400 | data/foo.i |
|
400 | data/foo.i | |
401 |
|
401 | |||
402 | $ hg debugrebuildfncache |
|
402 | $ hg debugrebuildfncache | |
403 | fncache already up to date |
|
403 | fncache already up to date | |
404 |
|
404 | |||
405 | debugrebuildfncache restores deleted fncache file |
|
405 | debugrebuildfncache restores deleted fncache file | |
406 |
|
406 | |||
407 | $ rm -f .hg/store/fncache |
|
407 | $ rm -f .hg/store/fncache | |
408 | $ hg debugrebuildfncache |
|
408 | $ hg debugrebuildfncache | |
409 | adding data/.bar.i |
|
409 | adding data/.bar.i | |
410 | adding data/foo.i |
|
410 | adding data/foo.i | |
411 | 2 items added, 0 removed from fncache |
|
411 | 2 items added, 0 removed from fncache | |
412 |
|
412 | |||
413 | $ cat .hg/store/fncache | sort |
|
413 | $ cat .hg/store/fncache | sort | |
414 | data/.bar.i |
|
414 | data/.bar.i | |
415 | data/foo.i |
|
415 | data/foo.i | |
416 |
|
416 | |||
417 | Rebuild after rebuild should no-op |
|
417 | Rebuild after rebuild should no-op | |
418 |
|
418 | |||
419 | $ hg debugrebuildfncache |
|
419 | $ hg debugrebuildfncache | |
420 | fncache already up to date |
|
420 | fncache already up to date | |
421 |
|
421 | |||
422 | A single missing file should get restored, an extra file should be removed |
|
422 | A single missing file should get restored, an extra file should be removed | |
423 |
|
423 | |||
424 | $ cat > .hg/store/fncache << EOF |
|
424 | $ cat > .hg/store/fncache << EOF | |
425 | > data/foo.i |
|
425 | > data/foo.i | |
426 | > data/bad-entry.i |
|
426 | > data/bad-entry.i | |
427 | > EOF |
|
427 | > EOF | |
428 |
|
428 | |||
429 | $ hg debugrebuildfncache |
|
429 | $ hg debugrebuildfncache | |
430 | removing data/bad-entry.i |
|
430 | removing data/bad-entry.i | |
431 | adding data/.bar.i |
|
431 | adding data/.bar.i | |
432 | 1 items added, 1 removed from fncache |
|
432 | 1 items added, 1 removed from fncache | |
433 |
|
433 | |||
434 | $ cat .hg/store/fncache | sort |
|
434 | $ cat .hg/store/fncache | sort | |
435 | data/.bar.i |
|
435 | data/.bar.i | |
436 | data/foo.i |
|
436 | data/foo.i | |
437 |
|
437 | |||
|
438 | debugrebuildfncache recovers from truncated line in fncache | |||
|
439 | ||||
|
440 | $ printf a > .hg/store/fncache | |||
|
441 | $ hg debugrebuildfncache | |||
|
442 | fncache does not ends with a newline | |||
|
443 | adding data/.bar.i | |||
|
444 | adding data/foo.i | |||
|
445 | 2 items added, 0 removed from fncache | |||
|
446 | ||||
|
447 | $ cat .hg/store/fncache | sort | |||
|
448 | data/.bar.i | |||
|
449 | data/foo.i | |||
|
450 | ||||
438 | $ cd .. |
|
451 | $ cd .. | |
439 |
|
452 | |||
440 | Try a simple variation without dotencode to ensure fncache is ignorant of encoding |
|
453 | Try a simple variation without dotencode to ensure fncache is ignorant of encoding | |
441 |
|
454 | |||
442 | $ hg --config format.dotencode=false init nodotencode |
|
455 | $ hg --config format.dotencode=false init nodotencode | |
443 | $ cd nodotencode |
|
456 | $ cd nodotencode | |
444 | $ echo initial > foo |
|
457 | $ echo initial > foo | |
445 | $ echo initial > .bar |
|
458 | $ echo initial > .bar | |
446 | $ hg commit -A -m initial |
|
459 | $ hg commit -A -m initial | |
447 | adding .bar |
|
460 | adding .bar | |
448 | adding foo |
|
461 | adding foo | |
449 |
|
462 | |||
450 | $ cat .hg/store/fncache | sort |
|
463 | $ cat .hg/store/fncache | sort | |
451 | data/.bar.i |
|
464 | data/.bar.i | |
452 | data/foo.i |
|
465 | data/foo.i | |
453 |
|
466 | |||
454 | $ rm .hg/store/fncache |
|
467 | $ rm .hg/store/fncache | |
455 | $ hg debugrebuildfncache |
|
468 | $ hg debugrebuildfncache | |
456 | adding data/.bar.i |
|
469 | adding data/.bar.i | |
457 | adding data/foo.i |
|
470 | adding data/foo.i | |
458 | 2 items added, 0 removed from fncache |
|
471 | 2 items added, 0 removed from fncache | |
459 |
|
472 | |||
460 | $ cat .hg/store/fncache | sort |
|
473 | $ cat .hg/store/fncache | sort | |
461 | data/.bar.i |
|
474 | data/.bar.i | |
462 | data/foo.i |
|
475 | data/foo.i | |
463 |
|
476 | |||
464 | $ cd .. |
|
477 | $ cd .. | |
465 |
|
478 | |||
466 | In repositories that have accumulated a large number of files over time, the |
|
479 | In repositories that have accumulated a large number of files over time, the | |
467 | fncache file is going to be large. If we possibly can avoid loading it, so much the better. |
|
480 | fncache file is going to be large. If we possibly can avoid loading it, so much the better. | |
468 | The cache should not loaded when committing changes to existing files, or when unbundling |
|
481 | The cache should not loaded when committing changes to existing files, or when unbundling | |
469 | changesets that only contain changes to existing files: |
|
482 | changesets that only contain changes to existing files: | |
470 |
|
483 | |||
471 | $ cat > fncacheloadwarn.py << EOF |
|
484 | $ cat > fncacheloadwarn.py << EOF | |
472 | > from __future__ import absolute_import |
|
485 | > from __future__ import absolute_import | |
473 | > from mercurial import extensions, localrepo |
|
486 | > from mercurial import extensions, localrepo | |
474 | > |
|
487 | > | |
475 | > def extsetup(ui): |
|
488 | > def extsetup(ui): | |
476 | > def wrapstore(orig, requirements, *args): |
|
489 | > def wrapstore(orig, requirements, *args): | |
477 | > store = orig(requirements, *args) |
|
490 | > store = orig(requirements, *args) | |
478 | > if b'store' in requirements and b'fncache' in requirements: |
|
491 | > if b'store' in requirements and b'fncache' in requirements: | |
479 | > instrumentfncachestore(store, ui) |
|
492 | > instrumentfncachestore(store, ui) | |
480 | > return store |
|
493 | > return store | |
481 | > extensions.wrapfunction(localrepo, 'makestore', wrapstore) |
|
494 | > extensions.wrapfunction(localrepo, 'makestore', wrapstore) | |
482 | > |
|
495 | > | |
483 | > def instrumentfncachestore(fncachestore, ui): |
|
496 | > def instrumentfncachestore(fncachestore, ui): | |
484 | > class instrumentedfncache(type(fncachestore.fncache)): |
|
497 | > class instrumentedfncache(type(fncachestore.fncache)): | |
485 | > def _load(self): |
|
498 | > def _load(self): | |
486 | > ui.warn(b'fncache load triggered!\n') |
|
499 | > ui.warn(b'fncache load triggered!\n') | |
487 | > super(instrumentedfncache, self)._load() |
|
500 | > super(instrumentedfncache, self)._load() | |
488 | > fncachestore.fncache.__class__ = instrumentedfncache |
|
501 | > fncachestore.fncache.__class__ = instrumentedfncache | |
489 | > EOF |
|
502 | > EOF | |
490 |
|
503 | |||
491 | $ fncachextpath=`pwd`/fncacheloadwarn.py |
|
504 | $ fncachextpath=`pwd`/fncacheloadwarn.py | |
492 | $ hg init nofncacheload |
|
505 | $ hg init nofncacheload | |
493 | $ cd nofncacheload |
|
506 | $ cd nofncacheload | |
494 | $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc |
|
507 | $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc | |
495 |
|
508 | |||
496 | A new file should trigger a load, as we'd want to update the fncache set in that case: |
|
509 | A new file should trigger a load, as we'd want to update the fncache set in that case: | |
497 |
|
510 | |||
498 | $ touch foo |
|
511 | $ touch foo | |
499 | $ hg ci -qAm foo |
|
512 | $ hg ci -qAm foo | |
500 | fncache load triggered! |
|
513 | fncache load triggered! | |
501 |
|
514 | |||
502 | But modifying that file should not: |
|
515 | But modifying that file should not: | |
503 |
|
516 | |||
504 | $ echo bar >> foo |
|
517 | $ echo bar >> foo | |
505 | $ hg ci -qm foo |
|
518 | $ hg ci -qm foo | |
506 |
|
519 | |||
507 | If a transaction has been aborted, the zero-size truncated index file will |
|
520 | If a transaction has been aborted, the zero-size truncated index file will | |
508 | not prevent the fncache from being loaded; rather than actually abort |
|
521 | not prevent the fncache from being loaded; rather than actually abort | |
509 | a transaction, we simulate the situation by creating a zero-size index file: |
|
522 | a transaction, we simulate the situation by creating a zero-size index file: | |
510 |
|
523 | |||
511 | $ touch .hg/store/data/bar.i |
|
524 | $ touch .hg/store/data/bar.i | |
512 | $ touch bar |
|
525 | $ touch bar | |
513 | $ hg ci -qAm bar |
|
526 | $ hg ci -qAm bar | |
514 | fncache load triggered! |
|
527 | fncache load triggered! | |
515 |
|
528 | |||
516 | Unbundling should follow the same rules; existing files should not cause a load: |
|
529 | Unbundling should follow the same rules; existing files should not cause a load: | |
517 |
|
530 | |||
518 | $ hg clone -q . tobundle |
|
531 | $ hg clone -q . tobundle | |
519 | $ echo 'new line' > tobundle/bar |
|
532 | $ echo 'new line' > tobundle/bar | |
520 | $ hg -R tobundle ci -qm bar |
|
533 | $ hg -R tobundle ci -qm bar | |
521 | $ hg -R tobundle bundle -q barupdated.hg |
|
534 | $ hg -R tobundle bundle -q barupdated.hg | |
522 | $ hg unbundle -q barupdated.hg |
|
535 | $ hg unbundle -q barupdated.hg | |
523 |
|
536 | |||
524 | but adding new files should: |
|
537 | but adding new files should: | |
525 |
|
538 | |||
526 | $ touch tobundle/newfile |
|
539 | $ touch tobundle/newfile | |
527 | $ hg -R tobundle ci -qAm newfile |
|
540 | $ hg -R tobundle ci -qAm newfile | |
528 | $ hg -R tobundle bundle -q newfile.hg |
|
541 | $ hg -R tobundle bundle -q newfile.hg | |
529 | $ hg unbundle -q newfile.hg |
|
542 | $ hg unbundle -q newfile.hg | |
530 | fncache load triggered! |
|
543 | fncache load triggered! | |
531 |
|
544 | |||
532 | $ cd .. |
|
545 | $ cd .. |
@@ -1,2005 +1,2005 b'' | |||||
1 | This file tests the behavior of run-tests.py itself. |
|
1 | This file tests the behavior of run-tests.py itself. | |
2 |
|
2 | |||
3 | Avoid interference from actual test env: |
|
3 | Avoid interference from actual test env: | |
4 |
|
4 | |||
5 | $ . "$TESTDIR/helper-runtests.sh" |
|
5 | $ . "$TESTDIR/helper-runtests.sh" | |
6 |
|
6 | |||
7 | Smoke test with install |
|
7 | Smoke test with install | |
8 | ============ |
|
8 | ============ | |
9 | $ "$PYTHON" $TESTDIR/run-tests.py $HGTEST_RUN_TESTS_PURE -l |
|
9 | $ "$PYTHON" $TESTDIR/run-tests.py $HGTEST_RUN_TESTS_PURE -l | |
10 | running 0 tests using 0 parallel processes |
|
10 | running 0 tests using 0 parallel processes | |
11 |
|
11 | |||
12 | # Ran 0 tests, 0 skipped, 0 failed. |
|
12 | # Ran 0 tests, 0 skipped, 0 failed. | |
13 |
|
13 | |||
14 | Define a helper to avoid the install step |
|
14 | Define a helper to avoid the install step | |
15 | ============= |
|
15 | ============= | |
16 | $ rt() |
|
16 | $ rt() | |
17 | > { |
|
17 | > { | |
18 | > "$PYTHON" $TESTDIR/run-tests.py --with-hg=`which hg` -j1 "$@" |
|
18 | > "$PYTHON" $TESTDIR/run-tests.py --with-hg=`which hg` -j1 "$@" | |
19 | > } |
|
19 | > } | |
20 |
|
20 | |||
21 | error paths |
|
21 | error paths | |
22 |
|
22 | |||
23 | #if symlink |
|
23 | #if symlink | |
24 | $ ln -s `which true` hg |
|
24 | $ ln -s `which true` hg | |
25 | $ "$PYTHON" $TESTDIR/run-tests.py --with-hg=./hg |
|
25 | $ "$PYTHON" $TESTDIR/run-tests.py --with-hg=./hg | |
26 | warning: --with-hg should specify an hg script |
|
26 | warning: --with-hg should specify an hg script | |
27 | running 0 tests using 0 parallel processes |
|
27 | running 0 tests using 0 parallel processes | |
28 |
|
28 | |||
29 | # Ran 0 tests, 0 skipped, 0 failed. |
|
29 | # Ran 0 tests, 0 skipped, 0 failed. | |
30 | $ rm hg |
|
30 | $ rm hg | |
31 | #endif |
|
31 | #endif | |
32 |
|
32 | |||
33 | #if execbit |
|
33 | #if execbit | |
34 | $ touch hg |
|
34 | $ touch hg | |
35 | $ "$PYTHON" $TESTDIR/run-tests.py --with-hg=./hg |
|
35 | $ "$PYTHON" $TESTDIR/run-tests.py --with-hg=./hg | |
36 | usage: run-tests.py [options] [tests] |
|
36 | usage: run-tests.py [options] [tests] | |
37 | run-tests.py: error: --with-hg must specify an executable hg script |
|
37 | run-tests.py: error: --with-hg must specify an executable hg script | |
38 | [2] |
|
38 | [2] | |
39 | $ rm hg |
|
39 | $ rm hg | |
40 | #endif |
|
40 | #endif | |
41 |
|
41 | |||
42 | Features for testing optional lines |
|
42 | Features for testing optional lines | |
43 | =================================== |
|
43 | =================================== | |
44 |
|
44 | |||
45 | $ cat > hghaveaddon.py <<EOF |
|
45 | $ cat > hghaveaddon.py <<EOF | |
46 | > import hghave |
|
46 | > import hghave | |
47 | > @hghave.check("custom", "custom hghave feature") |
|
47 | > @hghave.check("custom", "custom hghave feature") | |
48 | > def has_custom(): |
|
48 | > def has_custom(): | |
49 | > return True |
|
49 | > return True | |
50 | > @hghave.check("missing", "missing hghave feature") |
|
50 | > @hghave.check("missing", "missing hghave feature") | |
51 | > def has_missing(): |
|
51 | > def has_missing(): | |
52 | > return False |
|
52 | > return False | |
53 | > EOF |
|
53 | > EOF | |
54 |
|
54 | |||
55 | an empty test |
|
55 | an empty test | |
56 | ======================= |
|
56 | ======================= | |
57 |
|
57 | |||
58 | $ touch test-empty.t |
|
58 | $ touch test-empty.t | |
59 | $ rt |
|
59 | $ rt | |
60 | running 1 tests using 1 parallel processes |
|
60 | running 1 tests using 1 parallel processes | |
61 | . |
|
61 | . | |
62 | # Ran 1 tests, 0 skipped, 0 failed. |
|
62 | # Ran 1 tests, 0 skipped, 0 failed. | |
63 | $ rm test-empty.t |
|
63 | $ rm test-empty.t | |
64 |
|
64 | |||
65 | a succesful test |
|
65 | a succesful test | |
66 | ======================= |
|
66 | ======================= | |
67 |
|
67 | |||
68 | $ cat > test-success.t << EOF |
|
68 | $ cat > test-success.t << EOF | |
69 | > $ echo babar |
|
69 | > $ echo babar | |
70 | > babar |
|
70 | > babar | |
71 | > $ echo xyzzy |
|
71 | > $ echo xyzzy | |
72 | > dont_print (?) |
|
72 | > dont_print (?) | |
73 | > nothing[42]line (re) (?) |
|
73 | > nothing[42]line (re) (?) | |
74 | > never*happens (glob) (?) |
|
74 | > never*happens (glob) (?) | |
75 | > more_nothing (?) |
|
75 | > more_nothing (?) | |
76 | > xyzzy |
|
76 | > xyzzy | |
77 | > nor this (?) |
|
77 | > nor this (?) | |
78 | > $ printf 'abc\ndef\nxyz\n' |
|
78 | > $ printf 'abc\ndef\nxyz\n' | |
79 | > 123 (?) |
|
79 | > 123 (?) | |
80 | > abc |
|
80 | > abc | |
81 | > def (?) |
|
81 | > def (?) | |
82 | > 456 (?) |
|
82 | > 456 (?) | |
83 | > xyz |
|
83 | > xyz | |
84 | > $ printf 'zyx\nwvu\ntsr\n' |
|
84 | > $ printf 'zyx\nwvu\ntsr\n' | |
85 | > abc (?) |
|
85 | > abc (?) | |
86 | > zyx (custom !) |
|
86 | > zyx (custom !) | |
87 | > wvu |
|
87 | > wvu | |
88 | > no_print (no-custom !) |
|
88 | > no_print (no-custom !) | |
89 | > tsr (no-missing !) |
|
89 | > tsr (no-missing !) | |
90 | > missing (missing !) |
|
90 | > missing (missing !) | |
91 | > EOF |
|
91 | > EOF | |
92 |
|
92 | |||
93 | $ rt |
|
93 | $ rt | |
94 | running 1 tests using 1 parallel processes |
|
94 | running 1 tests using 1 parallel processes | |
95 | . |
|
95 | . | |
96 | # Ran 1 tests, 0 skipped, 0 failed. |
|
96 | # Ran 1 tests, 0 skipped, 0 failed. | |
97 |
|
97 | |||
98 | failing test |
|
98 | failing test | |
99 | ================== |
|
99 | ================== | |
100 |
|
100 | |||
101 | test churn with globs |
|
101 | test churn with globs | |
102 | $ cat > test-failure.t <<EOF |
|
102 | $ cat > test-failure.t <<EOF | |
103 | > $ echo "bar-baz"; echo "bar-bad"; echo foo |
|
103 | > $ echo "bar-baz"; echo "bar-bad"; echo foo | |
104 | > bar*bad (glob) |
|
104 | > bar*bad (glob) | |
105 | > bar*baz (glob) |
|
105 | > bar*baz (glob) | |
106 | > | fo (re) |
|
106 | > | fo (re) | |
107 | > EOF |
|
107 | > EOF | |
108 | $ rt test-failure.t |
|
108 | $ rt test-failure.t | |
109 | running 1 tests using 1 parallel processes |
|
109 | running 1 tests using 1 parallel processes | |
110 |
|
110 | |||
111 | --- $TESTTMP/test-failure.t |
|
111 | --- $TESTTMP/test-failure.t | |
112 | +++ $TESTTMP/test-failure.t.err |
|
112 | +++ $TESTTMP/test-failure.t.err | |
113 | @@ -1,4 +1,4 @@ |
|
113 | @@ -1,4 +1,4 @@ | |
114 | $ echo "bar-baz"; echo "bar-bad"; echo foo |
|
114 | $ echo "bar-baz"; echo "bar-bad"; echo foo | |
115 | + bar*baz (glob) |
|
115 | + bar*baz (glob) | |
116 | bar*bad (glob) |
|
116 | bar*bad (glob) | |
117 | - bar*baz (glob) |
|
117 | - bar*baz (glob) | |
118 | - | fo (re) |
|
118 | - | fo (re) | |
119 | + foo |
|
119 | + foo | |
120 |
|
120 | |||
121 | ERROR: test-failure.t output changed |
|
121 | ERROR: test-failure.t output changed | |
122 | ! |
|
122 | ! | |
123 | Failed test-failure.t: output changed |
|
123 | Failed test-failure.t: output changed | |
124 | # Ran 1 tests, 0 skipped, 1 failed. |
|
124 | # Ran 1 tests, 0 skipped, 1 failed. | |
125 | python hash seed: * (glob) |
|
125 | python hash seed: * (glob) | |
126 | [1] |
|
126 | [1] | |
127 |
|
127 | |||
128 | test how multiple globs gets matched with lines in output |
|
128 | test how multiple globs gets matched with lines in output | |
129 | $ cat > test-failure-globs.t <<EOF |
|
129 | $ cat > test-failure-globs.t <<EOF | |
130 | > $ echo "context"; echo "context"; \ |
|
130 | > $ echo "context"; echo "context"; \ | |
131 | > echo "key: 1"; echo "value: not a"; \ |
|
131 | > echo "key: 1"; echo "value: not a"; \ | |
132 | > echo "key: 2"; echo "value: not b"; \ |
|
132 | > echo "key: 2"; echo "value: not b"; \ | |
133 | > echo "key: 3"; echo "value: c"; \ |
|
133 | > echo "key: 3"; echo "value: c"; \ | |
134 | > echo "key: 4"; echo "value: d" |
|
134 | > echo "key: 4"; echo "value: d" | |
135 | > context |
|
135 | > context | |
136 | > context |
|
136 | > context | |
137 | > key: 1 |
|
137 | > key: 1 | |
138 | > value: a |
|
138 | > value: a | |
139 | > key: 2 |
|
139 | > key: 2 | |
140 | > value: b |
|
140 | > value: b | |
141 | > key: 3 |
|
141 | > key: 3 | |
142 | > value: * (glob) |
|
142 | > value: * (glob) | |
143 | > key: 4 |
|
143 | > key: 4 | |
144 | > value: * (glob) |
|
144 | > value: * (glob) | |
145 | > EOF |
|
145 | > EOF | |
146 | $ rt test-failure-globs.t |
|
146 | $ rt test-failure-globs.t | |
147 | running 1 tests using 1 parallel processes |
|
147 | running 1 tests using 1 parallel processes | |
148 |
|
148 | |||
149 | --- $TESTTMP/test-failure-globs.t |
|
149 | --- $TESTTMP/test-failure-globs.t | |
150 | +++ $TESTTMP/test-failure-globs.t.err |
|
150 | +++ $TESTTMP/test-failure-globs.t.err | |
151 | @@ -2,9 +2,9 @@ |
|
151 | @@ -2,9 +2,9 @@ | |
152 | context |
|
152 | context | |
153 | context |
|
153 | context | |
154 | key: 1 |
|
154 | key: 1 | |
155 | - value: a |
|
155 | - value: a | |
156 | + value: not a |
|
156 | + value: not a | |
157 | key: 2 |
|
157 | key: 2 | |
158 | - value: b |
|
158 | - value: b | |
159 | + value: not b |
|
159 | + value: not b | |
160 | key: 3 |
|
160 | key: 3 | |
161 | value: * (glob) |
|
161 | value: * (glob) | |
162 | key: 4 |
|
162 | key: 4 | |
163 |
|
163 | |||
164 | ERROR: test-failure-globs.t output changed |
|
164 | ERROR: test-failure-globs.t output changed | |
165 | ! |
|
165 | ! | |
166 | Failed test-failure-globs.t: output changed |
|
166 | Failed test-failure-globs.t: output changed | |
167 | # Ran 1 tests, 0 skipped, 1 failed. |
|
167 | # Ran 1 tests, 0 skipped, 1 failed. | |
168 | python hash seed: * (glob) |
|
168 | python hash seed: * (glob) | |
169 | [1] |
|
169 | [1] | |
170 | $ rm test-failure-globs.t |
|
170 | $ rm test-failure-globs.t | |
171 |
|
171 | |||
172 | test diff colorisation |
|
172 | test diff colorisation | |
173 |
|
173 | |||
174 | #if no-windows pygments |
|
174 | #if no-windows pygments | |
175 | $ rt test-failure.t --color always |
|
175 | $ rt test-failure.t --color always | |
176 | running 1 tests using 1 parallel processes |
|
176 | running 1 tests using 1 parallel processes | |
177 |
|
177 | |||
178 | \x1b[38;5;124m--- $TESTTMP/test-failure.t\x1b[39m (esc) |
|
178 | \x1b[38;5;124m--- $TESTTMP/test-failure.t\x1b[39m (esc) | |
179 | \x1b[38;5;34m+++ $TESTTMP/test-failure.t.err\x1b[39m (esc) |
|
179 | \x1b[38;5;34m+++ $TESTTMP/test-failure.t.err\x1b[39m (esc) | |
180 | \x1b[38;5;90;01m@@ -1,4 +1,4 @@\x1b[39;00m (esc) |
|
180 | \x1b[38;5;90;01m@@ -1,4 +1,4 @@\x1b[39;00m (esc) | |
181 | $ echo "bar-baz"; echo "bar-bad"; echo foo |
|
181 | $ echo "bar-baz"; echo "bar-bad"; echo foo | |
182 | \x1b[38;5;34m+ bar*baz (glob)\x1b[39m (esc) |
|
182 | \x1b[38;5;34m+ bar*baz (glob)\x1b[39m (esc) | |
183 | bar*bad (glob) |
|
183 | bar*bad (glob) | |
184 | \x1b[38;5;124m- bar*baz (glob)\x1b[39m (esc) |
|
184 | \x1b[38;5;124m- bar*baz (glob)\x1b[39m (esc) | |
185 | \x1b[38;5;124m- | fo (re)\x1b[39m (esc) |
|
185 | \x1b[38;5;124m- | fo (re)\x1b[39m (esc) | |
186 | \x1b[38;5;34m+ foo\x1b[39m (esc) |
|
186 | \x1b[38;5;34m+ foo\x1b[39m (esc) | |
187 |
|
187 | |||
188 | \x1b[38;5;88mERROR: \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m output changed\x1b[39m (esc) |
|
188 | \x1b[38;5;88mERROR: \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m output changed\x1b[39m (esc) | |
189 | ! |
|
189 | ! | |
190 | \x1b[38;5;88mFailed \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m: output changed\x1b[39m (esc) |
|
190 | \x1b[38;5;88mFailed \x1b[39m\x1b[38;5;9mtest-failure.t\x1b[39m\x1b[38;5;88m: output changed\x1b[39m (esc) | |
191 | # Ran 1 tests, 0 skipped, 1 failed. |
|
191 | # Ran 1 tests, 0 skipped, 1 failed. | |
192 | python hash seed: * (glob) |
|
192 | python hash seed: * (glob) | |
193 | [1] |
|
193 | [1] | |
194 |
|
194 | |||
195 | $ rt test-failure.t 2> tmp.log |
|
195 | $ rt test-failure.t 2> tmp.log | |
196 | running 1 tests using 1 parallel processes |
|
196 | running 1 tests using 1 parallel processes | |
197 | [1] |
|
197 | [1] | |
198 | $ cat tmp.log |
|
198 | $ cat tmp.log | |
199 |
|
199 | |||
200 | --- $TESTTMP/test-failure.t |
|
200 | --- $TESTTMP/test-failure.t | |
201 | +++ $TESTTMP/test-failure.t.err |
|
201 | +++ $TESTTMP/test-failure.t.err | |
202 | @@ -1,4 +1,4 @@ |
|
202 | @@ -1,4 +1,4 @@ | |
203 | $ echo "bar-baz"; echo "bar-bad"; echo foo |
|
203 | $ echo "bar-baz"; echo "bar-bad"; echo foo | |
204 | + bar*baz (glob) |
|
204 | + bar*baz (glob) | |
205 | bar*bad (glob) |
|
205 | bar*bad (glob) | |
206 | - bar*baz (glob) |
|
206 | - bar*baz (glob) | |
207 | - | fo (re) |
|
207 | - | fo (re) | |
208 | + foo |
|
208 | + foo | |
209 |
|
209 | |||
210 | ERROR: test-failure.t output changed |
|
210 | ERROR: test-failure.t output changed | |
211 | ! |
|
211 | ! | |
212 | Failed test-failure.t: output changed |
|
212 | Failed test-failure.t: output changed | |
213 | # Ran 1 tests, 0 skipped, 1 failed. |
|
213 | # Ran 1 tests, 0 skipped, 1 failed. | |
214 | python hash seed: * (glob) |
|
214 | python hash seed: * (glob) | |
215 | #endif |
|
215 | #endif | |
216 |
|
216 | |||
217 | $ cat > test-failure.t << EOF |
|
217 | $ cat > test-failure.t << EOF | |
218 | > $ true |
|
218 | > $ true | |
219 | > should go away (true !) |
|
219 | > should go away (true !) | |
220 | > $ true |
|
220 | > $ true | |
221 | > should stay (false !) |
|
221 | > should stay (false !) | |
222 | > |
|
222 | > | |
223 | > Should remove first line, not second or third |
|
223 | > Should remove first line, not second or third | |
224 | > $ echo 'testing' |
|
224 | > $ echo 'testing' | |
225 | > baz*foo (glob) (true !) |
|
225 | > baz*foo (glob) (true !) | |
226 | > foobar*foo (glob) (false !) |
|
226 | > foobar*foo (glob) (false !) | |
227 | > te*ting (glob) (true !) |
|
227 | > te*ting (glob) (true !) | |
228 | > |
|
228 | > | |
229 | > Should keep first two lines, remove third and last |
|
229 | > Should keep first two lines, remove third and last | |
230 | > $ echo 'testing' |
|
230 | > $ echo 'testing' | |
231 | > test.ng (re) (true !) |
|
231 | > test.ng (re) (true !) | |
232 | > foo.ar (re) (false !) |
|
232 | > foo.ar (re) (false !) | |
233 | > b.r (re) (true !) |
|
233 | > b.r (re) (true !) | |
234 | > missing (?) |
|
234 | > missing (?) | |
235 | > awol (true !) |
|
235 | > awol (true !) | |
236 | > |
|
236 | > | |
237 | > The "missing" line should stay, even though awol is dropped |
|
237 | > The "missing" line should stay, even though awol is dropped | |
238 | > $ echo 'testing' |
|
238 | > $ echo 'testing' | |
239 | > test.ng (re) (true !) |
|
239 | > test.ng (re) (true !) | |
240 | > foo.ar (?) |
|
240 | > foo.ar (?) | |
241 | > awol |
|
241 | > awol | |
242 | > missing (?) |
|
242 | > missing (?) | |
243 | > EOF |
|
243 | > EOF | |
244 | $ rt test-failure.t |
|
244 | $ rt test-failure.t | |
245 | running 1 tests using 1 parallel processes |
|
245 | running 1 tests using 1 parallel processes | |
246 |
|
246 | |||
247 | --- $TESTTMP/test-failure.t |
|
247 | --- $TESTTMP/test-failure.t | |
248 | +++ $TESTTMP/test-failure.t.err |
|
248 | +++ $TESTTMP/test-failure.t.err | |
249 | @@ -1,11 +1,9 @@ |
|
249 | @@ -1,11 +1,9 @@ | |
250 | $ true |
|
250 | $ true | |
251 | - should go away (true !) |
|
251 | - should go away (true !) | |
252 | $ true |
|
252 | $ true | |
253 | should stay (false !) |
|
253 | should stay (false !) | |
254 |
|
254 | |||
255 | Should remove first line, not second or third |
|
255 | Should remove first line, not second or third | |
256 | $ echo 'testing' |
|
256 | $ echo 'testing' | |
257 | - baz*foo (glob) (true !) |
|
257 | - baz*foo (glob) (true !) | |
258 | foobar*foo (glob) (false !) |
|
258 | foobar*foo (glob) (false !) | |
259 | te*ting (glob) (true !) |
|
259 | te*ting (glob) (true !) | |
260 |
|
260 | |||
261 | foo.ar (re) (false !) |
|
261 | foo.ar (re) (false !) | |
262 | missing (?) |
|
262 | missing (?) | |
263 | @@ -13,13 +11,10 @@ |
|
263 | @@ -13,13 +11,10 @@ | |
264 | $ echo 'testing' |
|
264 | $ echo 'testing' | |
265 | test.ng (re) (true !) |
|
265 | test.ng (re) (true !) | |
266 | foo.ar (re) (false !) |
|
266 | foo.ar (re) (false !) | |
267 | - b.r (re) (true !) |
|
267 | - b.r (re) (true !) | |
268 | missing (?) |
|
268 | missing (?) | |
269 | - awol (true !) |
|
269 | - awol (true !) | |
270 |
|
270 | |||
271 | The "missing" line should stay, even though awol is dropped |
|
271 | The "missing" line should stay, even though awol is dropped | |
272 | $ echo 'testing' |
|
272 | $ echo 'testing' | |
273 | test.ng (re) (true !) |
|
273 | test.ng (re) (true !) | |
274 | foo.ar (?) |
|
274 | foo.ar (?) | |
275 | - awol |
|
275 | - awol | |
276 | missing (?) |
|
276 | missing (?) | |
277 |
|
277 | |||
278 | ERROR: test-failure.t output changed |
|
278 | ERROR: test-failure.t output changed | |
279 | ! |
|
279 | ! | |
280 | Failed test-failure.t: output changed |
|
280 | Failed test-failure.t: output changed | |
281 | # Ran 1 tests, 0 skipped, 1 failed. |
|
281 | # Ran 1 tests, 0 skipped, 1 failed. | |
282 | python hash seed: * (glob) |
|
282 | python hash seed: * (glob) | |
283 | [1] |
|
283 | [1] | |
284 |
|
284 | |||
285 | basic failing test |
|
285 | basic failing test | |
286 | $ cat > test-failure.t << EOF |
|
286 | $ cat > test-failure.t << EOF | |
287 | > $ echo babar |
|
287 | > $ echo babar | |
288 | > rataxes |
|
288 | > rataxes | |
289 | > This is a noop statement so that |
|
289 | > This is a noop statement so that | |
290 | > this test is still more bytes than success. |
|
290 | > this test is still more bytes than success. | |
291 | > pad pad pad pad............................................................ |
|
291 | > pad pad pad pad............................................................ | |
292 | > pad pad pad pad............................................................ |
|
292 | > pad pad pad pad............................................................ | |
293 | > pad pad pad pad............................................................ |
|
293 | > pad pad pad pad............................................................ | |
294 | > pad pad pad pad............................................................ |
|
294 | > pad pad pad pad............................................................ | |
295 | > pad pad pad pad............................................................ |
|
295 | > pad pad pad pad............................................................ | |
296 | > pad pad pad pad............................................................ |
|
296 | > pad pad pad pad............................................................ | |
297 | > EOF |
|
297 | > EOF | |
298 |
|
298 | |||
299 | >>> fh = open('test-failure-unicode.t', 'wb') |
|
299 | >>> fh = open('test-failure-unicode.t', 'wb') | |
300 | >>> fh.write(u' $ echo babar\u03b1\n'.encode('utf-8')) and None |
|
300 | >>> fh.write(u' $ echo babar\u03b1\n'.encode('utf-8')) and None | |
301 | >>> fh.write(u' l\u03b5\u03b5t\n'.encode('utf-8')) and None |
|
301 | >>> fh.write(u' l\u03b5\u03b5t\n'.encode('utf-8')) and None | |
302 |
|
302 | |||
303 | $ rt |
|
303 | $ rt | |
304 | running 3 tests using 1 parallel processes |
|
304 | running 3 tests using 1 parallel processes | |
305 |
|
305 | |||
306 | --- $TESTTMP/test-failure.t |
|
306 | --- $TESTTMP/test-failure.t | |
307 | +++ $TESTTMP/test-failure.t.err |
|
307 | +++ $TESTTMP/test-failure.t.err | |
308 | @@ -1,5 +1,5 @@ |
|
308 | @@ -1,5 +1,5 @@ | |
309 | $ echo babar |
|
309 | $ echo babar | |
310 | - rataxes |
|
310 | - rataxes | |
311 | + babar |
|
311 | + babar | |
312 | This is a noop statement so that |
|
312 | This is a noop statement so that | |
313 | this test is still more bytes than success. |
|
313 | this test is still more bytes than success. | |
314 | pad pad pad pad............................................................ |
|
314 | pad pad pad pad............................................................ | |
315 |
|
315 | |||
316 | ERROR: test-failure.t output changed |
|
316 | ERROR: test-failure.t output changed | |
317 | !. |
|
317 | !. | |
318 | --- $TESTTMP/test-failure-unicode.t |
|
318 | --- $TESTTMP/test-failure-unicode.t | |
319 | +++ $TESTTMP/test-failure-unicode.t.err |
|
319 | +++ $TESTTMP/test-failure-unicode.t.err | |
320 | @@ -1,2 +1,2 @@ |
|
320 | @@ -1,2 +1,2 @@ | |
321 | $ echo babar\xce\xb1 (esc) |
|
321 | $ echo babar\xce\xb1 (esc) | |
322 | - l\xce\xb5\xce\xb5t (esc) |
|
322 | - l\xce\xb5\xce\xb5t (esc) | |
323 | + babar\xce\xb1 (esc) |
|
323 | + babar\xce\xb1 (esc) | |
324 |
|
324 | |||
325 | ERROR: test-failure-unicode.t output changed |
|
325 | ERROR: test-failure-unicode.t output changed | |
326 | ! |
|
326 | ! | |
327 | Failed test-failure-unicode.t: output changed |
|
327 | Failed test-failure-unicode.t: output changed | |
328 | Failed test-failure.t: output changed |
|
328 | Failed test-failure.t: output changed | |
329 | # Ran 3 tests, 0 skipped, 2 failed. |
|
329 | # Ran 3 tests, 0 skipped, 2 failed. | |
330 | python hash seed: * (glob) |
|
330 | python hash seed: * (glob) | |
331 | [1] |
|
331 | [1] | |
332 |
|
332 | |||
333 | test --outputdir |
|
333 | test --outputdir | |
334 | $ mkdir output |
|
334 | $ mkdir output | |
335 | $ rt --outputdir output |
|
335 | $ rt --outputdir output | |
336 | running 3 tests using 1 parallel processes |
|
336 | running 3 tests using 1 parallel processes | |
337 |
|
337 | |||
338 | --- $TESTTMP/test-failure.t |
|
338 | --- $TESTTMP/test-failure.t | |
339 | +++ $TESTTMP/output/test-failure.t.err |
|
339 | +++ $TESTTMP/output/test-failure.t.err | |
340 | @@ -1,5 +1,5 @@ |
|
340 | @@ -1,5 +1,5 @@ | |
341 | $ echo babar |
|
341 | $ echo babar | |
342 | - rataxes |
|
342 | - rataxes | |
343 | + babar |
|
343 | + babar | |
344 | This is a noop statement so that |
|
344 | This is a noop statement so that | |
345 | this test is still more bytes than success. |
|
345 | this test is still more bytes than success. | |
346 | pad pad pad pad............................................................ |
|
346 | pad pad pad pad............................................................ | |
347 |
|
347 | |||
348 | ERROR: test-failure.t output changed |
|
348 | ERROR: test-failure.t output changed | |
349 | !. |
|
349 | !. | |
350 | --- $TESTTMP/test-failure-unicode.t |
|
350 | --- $TESTTMP/test-failure-unicode.t | |
351 | +++ $TESTTMP/output/test-failure-unicode.t.err |
|
351 | +++ $TESTTMP/output/test-failure-unicode.t.err | |
352 | @@ -1,2 +1,2 @@ |
|
352 | @@ -1,2 +1,2 @@ | |
353 | $ echo babar\xce\xb1 (esc) |
|
353 | $ echo babar\xce\xb1 (esc) | |
354 | - l\xce\xb5\xce\xb5t (esc) |
|
354 | - l\xce\xb5\xce\xb5t (esc) | |
355 | + babar\xce\xb1 (esc) |
|
355 | + babar\xce\xb1 (esc) | |
356 |
|
356 | |||
357 | ERROR: test-failure-unicode.t output changed |
|
357 | ERROR: test-failure-unicode.t output changed | |
358 | ! |
|
358 | ! | |
359 | Failed test-failure-unicode.t: output changed |
|
359 | Failed test-failure-unicode.t: output changed | |
360 | Failed test-failure.t: output changed |
|
360 | Failed test-failure.t: output changed | |
361 | # Ran 3 tests, 0 skipped, 2 failed. |
|
361 | # Ran 3 tests, 0 skipped, 2 failed. | |
362 | python hash seed: * (glob) |
|
362 | python hash seed: * (glob) | |
363 | [1] |
|
363 | [1] | |
364 | $ ls -a output |
|
364 | $ ls -a output | |
365 | . |
|
365 | . | |
366 | .. |
|
366 | .. | |
367 | .testtimes |
|
367 | .testtimes | |
368 | test-failure-unicode.t.err |
|
368 | test-failure-unicode.t.err | |
369 | test-failure.t.err |
|
369 | test-failure.t.err | |
370 |
|
370 | |||
371 | test --xunit support |
|
371 | test --xunit support | |
372 | $ rt --xunit=xunit.xml |
|
372 | $ rt --xunit=xunit.xml | |
373 | running 3 tests using 1 parallel processes |
|
373 | running 3 tests using 1 parallel processes | |
374 |
|
374 | |||
375 | --- $TESTTMP/test-failure.t |
|
375 | --- $TESTTMP/test-failure.t | |
376 | +++ $TESTTMP/test-failure.t.err |
|
376 | +++ $TESTTMP/test-failure.t.err | |
377 | @@ -1,5 +1,5 @@ |
|
377 | @@ -1,5 +1,5 @@ | |
378 | $ echo babar |
|
378 | $ echo babar | |
379 | - rataxes |
|
379 | - rataxes | |
380 | + babar |
|
380 | + babar | |
381 | This is a noop statement so that |
|
381 | This is a noop statement so that | |
382 | this test is still more bytes than success. |
|
382 | this test is still more bytes than success. | |
383 | pad pad pad pad............................................................ |
|
383 | pad pad pad pad............................................................ | |
384 |
|
384 | |||
385 | ERROR: test-failure.t output changed |
|
385 | ERROR: test-failure.t output changed | |
386 | !. |
|
386 | !. | |
387 | --- $TESTTMP/test-failure-unicode.t |
|
387 | --- $TESTTMP/test-failure-unicode.t | |
388 | +++ $TESTTMP/test-failure-unicode.t.err |
|
388 | +++ $TESTTMP/test-failure-unicode.t.err | |
389 | @@ -1,2 +1,2 @@ |
|
389 | @@ -1,2 +1,2 @@ | |
390 | $ echo babar\xce\xb1 (esc) |
|
390 | $ echo babar\xce\xb1 (esc) | |
391 | - l\xce\xb5\xce\xb5t (esc) |
|
391 | - l\xce\xb5\xce\xb5t (esc) | |
392 | + babar\xce\xb1 (esc) |
|
392 | + babar\xce\xb1 (esc) | |
393 |
|
393 | |||
394 | ERROR: test-failure-unicode.t output changed |
|
394 | ERROR: test-failure-unicode.t output changed | |
395 | ! |
|
395 | ! | |
396 | Failed test-failure-unicode.t: output changed |
|
396 | Failed test-failure-unicode.t: output changed | |
397 | Failed test-failure.t: output changed |
|
397 | Failed test-failure.t: output changed | |
398 | # Ran 3 tests, 0 skipped, 2 failed. |
|
398 | # Ran 3 tests, 0 skipped, 2 failed. | |
399 | python hash seed: * (glob) |
|
399 | python hash seed: * (glob) | |
400 | [1] |
|
400 | [1] | |
401 | $ cat xunit.xml |
|
401 | $ cat xunit.xml | |
402 | <?xml version="1.0" encoding="utf-8"?> |
|
402 | <?xml version="1.0" encoding="utf-8"?> | |
403 | <testsuite errors="0" failures="2" name="run-tests" skipped="0" tests="3"> |
|
403 | <testsuite errors="0" failures="2" name="run-tests" skipped="0" tests="3"> | |
404 | <testcase name="test-success.t" time="*"/> (glob) |
|
404 | <testcase name="test-success.t" time="*"/> (glob) | |
405 | <testcase name="test-failure-unicode.t" time="*"> (glob) |
|
405 | <testcase name="test-failure-unicode.t" time="*"> (glob) | |
406 | <failure message="output changed" type="output-mismatch"> |
|
406 | <failure message="output changed" type="output-mismatch"> | |
407 | <![CDATA[--- $TESTTMP/test-failure-unicode.t |
|
407 | <![CDATA[--- $TESTTMP/test-failure-unicode.t | |
408 | +++ $TESTTMP/test-failure-unicode.t.err |
|
408 | +++ $TESTTMP/test-failure-unicode.t.err | |
409 | @@ -1,2 +1,2 @@ |
|
409 | @@ -1,2 +1,2 @@ | |
410 | $ echo babar\xce\xb1 (esc) |
|
410 | $ echo babar\xce\xb1 (esc) | |
411 | - l\xce\xb5\xce\xb5t (esc) |
|
411 | - l\xce\xb5\xce\xb5t (esc) | |
412 | + babar\xce\xb1 (esc) |
|
412 | + babar\xce\xb1 (esc) | |
413 | ]]> </failure> |
|
413 | ]]> </failure> | |
414 | </testcase> |
|
414 | </testcase> | |
415 | <testcase name="test-failure.t" time="*"> (glob) |
|
415 | <testcase name="test-failure.t" time="*"> (glob) | |
416 | <failure message="output changed" type="output-mismatch"> |
|
416 | <failure message="output changed" type="output-mismatch"> | |
417 | <![CDATA[--- $TESTTMP/test-failure.t |
|
417 | <![CDATA[--- $TESTTMP/test-failure.t | |
418 | +++ $TESTTMP/test-failure.t.err |
|
418 | +++ $TESTTMP/test-failure.t.err | |
419 | @@ -1,5 +1,5 @@ |
|
419 | @@ -1,5 +1,5 @@ | |
420 | $ echo babar |
|
420 | $ echo babar | |
421 | - rataxes |
|
421 | - rataxes | |
422 | + babar |
|
422 | + babar | |
423 | This is a noop statement so that |
|
423 | This is a noop statement so that | |
424 | this test is still more bytes than success. |
|
424 | this test is still more bytes than success. | |
425 | pad pad pad pad............................................................ |
|
425 | pad pad pad pad............................................................ | |
426 | ]]> </failure> |
|
426 | ]]> </failure> | |
427 | </testcase> |
|
427 | </testcase> | |
428 | </testsuite> |
|
428 | </testsuite> | |
429 |
|
429 | |||
430 | $ cat .testtimes |
|
430 | $ cat .testtimes | |
431 | test-empty.t * (glob) |
|
431 | test-empty.t * (glob) | |
432 | test-failure-globs.t * (glob) |
|
432 | test-failure-globs.t * (glob) | |
433 | test-failure-unicode.t * (glob) |
|
433 | test-failure-unicode.t * (glob) | |
434 | test-failure.t * (glob) |
|
434 | test-failure.t * (glob) | |
435 | test-success.t * (glob) |
|
435 | test-success.t * (glob) | |
436 |
|
436 | |||
437 | $ rt --list-tests |
|
437 | $ rt --list-tests | |
438 | test-failure-unicode.t |
|
438 | test-failure-unicode.t | |
439 | test-failure.t |
|
439 | test-failure.t | |
440 | test-success.t |
|
440 | test-success.t | |
441 |
|
441 | |||
442 | $ rt --list-tests --json |
|
442 | $ rt --list-tests --json | |
443 | test-failure-unicode.t |
|
443 | test-failure-unicode.t | |
444 | test-failure.t |
|
444 | test-failure.t | |
445 | test-success.t |
|
445 | test-success.t | |
446 | $ cat report.json |
|
446 | $ cat report.json | |
447 | testreport ={ |
|
447 | testreport ={ | |
448 | "test-failure-unicode.t": { |
|
448 | "test-failure-unicode.t": { | |
449 | "result": "success" |
|
449 | "result": "success" | |
450 | }, |
|
450 | }, | |
451 | "test-failure.t": { |
|
451 | "test-failure.t": { | |
452 | "result": "success" |
|
452 | "result": "success" | |
453 | }, |
|
453 | }, | |
454 | "test-success.t": { |
|
454 | "test-success.t": { | |
455 | "result": "success" |
|
455 | "result": "success" | |
456 | } |
|
456 | } | |
457 | } (no-eol) |
|
457 | } (no-eol) | |
458 |
|
458 | |||
459 | $ rt --list-tests --xunit=xunit.xml |
|
459 | $ rt --list-tests --xunit=xunit.xml | |
460 | test-failure-unicode.t |
|
460 | test-failure-unicode.t | |
461 | test-failure.t |
|
461 | test-failure.t | |
462 | test-success.t |
|
462 | test-success.t | |
463 | $ cat xunit.xml |
|
463 | $ cat xunit.xml | |
464 | <?xml version="1.0" encoding="utf-8"?> |
|
464 | <?xml version="1.0" encoding="utf-8"?> | |
465 | <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0"> |
|
465 | <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0"> | |
466 | <testcase name="test-failure-unicode.t"/> |
|
466 | <testcase name="test-failure-unicode.t"/> | |
467 | <testcase name="test-failure.t"/> |
|
467 | <testcase name="test-failure.t"/> | |
468 | <testcase name="test-success.t"/> |
|
468 | <testcase name="test-success.t"/> | |
469 | </testsuite> |
|
469 | </testsuite> | |
470 |
|
470 | |||
471 | $ rt --list-tests test-failure* --json --xunit=xunit.xml --outputdir output |
|
471 | $ rt --list-tests test-failure* --json --xunit=xunit.xml --outputdir output | |
472 | test-failure-unicode.t |
|
472 | test-failure-unicode.t | |
473 | test-failure.t |
|
473 | test-failure.t | |
474 | $ cat output/report.json |
|
474 | $ cat output/report.json | |
475 | testreport ={ |
|
475 | testreport ={ | |
476 | "test-failure-unicode.t": { |
|
476 | "test-failure-unicode.t": { | |
477 | "result": "success" |
|
477 | "result": "success" | |
478 | }, |
|
478 | }, | |
479 | "test-failure.t": { |
|
479 | "test-failure.t": { | |
480 | "result": "success" |
|
480 | "result": "success" | |
481 | } |
|
481 | } | |
482 | } (no-eol) |
|
482 | } (no-eol) | |
483 | $ cat xunit.xml |
|
483 | $ cat xunit.xml | |
484 | <?xml version="1.0" encoding="utf-8"?> |
|
484 | <?xml version="1.0" encoding="utf-8"?> | |
485 | <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0"> |
|
485 | <testsuite errors="0" failures="0" name="run-tests" skipped="0" tests="0"> | |
486 | <testcase name="test-failure-unicode.t"/> |
|
486 | <testcase name="test-failure-unicode.t"/> | |
487 | <testcase name="test-failure.t"/> |
|
487 | <testcase name="test-failure.t"/> | |
488 | </testsuite> |
|
488 | </testsuite> | |
489 |
|
489 | |||
490 | $ rm test-failure-unicode.t |
|
490 | $ rm test-failure-unicode.t | |
491 |
|
491 | |||
492 | test for --retest |
|
492 | test for --retest | |
493 | ==================== |
|
493 | ==================== | |
494 |
|
494 | |||
495 | $ rt --retest |
|
495 | $ rt --retest | |
496 | running 2 tests using 1 parallel processes |
|
496 | running 2 tests using 1 parallel processes | |
497 |
|
497 | |||
498 | --- $TESTTMP/test-failure.t |
|
498 | --- $TESTTMP/test-failure.t | |
499 | +++ $TESTTMP/test-failure.t.err |
|
499 | +++ $TESTTMP/test-failure.t.err | |
500 | @@ -1,5 +1,5 @@ |
|
500 | @@ -1,5 +1,5 @@ | |
501 | $ echo babar |
|
501 | $ echo babar | |
502 | - rataxes |
|
502 | - rataxes | |
503 | + babar |
|
503 | + babar | |
504 | This is a noop statement so that |
|
504 | This is a noop statement so that | |
505 | this test is still more bytes than success. |
|
505 | this test is still more bytes than success. | |
506 | pad pad pad pad............................................................ |
|
506 | pad pad pad pad............................................................ | |
507 |
|
507 | |||
508 | ERROR: test-failure.t output changed |
|
508 | ERROR: test-failure.t output changed | |
509 | ! |
|
509 | ! | |
510 | Failed test-failure.t: output changed |
|
510 | Failed test-failure.t: output changed | |
511 | # Ran 2 tests, 1 skipped, 1 failed. |
|
511 | # Ran 2 tests, 1 skipped, 1 failed. | |
512 | python hash seed: * (glob) |
|
512 | python hash seed: * (glob) | |
513 | [1] |
|
513 | [1] | |
514 |
|
514 | |||
515 | --retest works with --outputdir |
|
515 | --retest works with --outputdir | |
516 | $ rm -r output |
|
516 | $ rm -r output | |
517 | $ mkdir output |
|
517 | $ mkdir output | |
518 | $ mv test-failure.t.err output |
|
518 | $ mv test-failure.t.err output | |
519 | $ rt --retest --outputdir output |
|
519 | $ rt --retest --outputdir output | |
520 | running 2 tests using 1 parallel processes |
|
520 | running 2 tests using 1 parallel processes | |
521 |
|
521 | |||
522 | --- $TESTTMP/test-failure.t |
|
522 | --- $TESTTMP/test-failure.t | |
523 | +++ $TESTTMP/output/test-failure.t.err |
|
523 | +++ $TESTTMP/output/test-failure.t.err | |
524 | @@ -1,5 +1,5 @@ |
|
524 | @@ -1,5 +1,5 @@ | |
525 | $ echo babar |
|
525 | $ echo babar | |
526 | - rataxes |
|
526 | - rataxes | |
527 | + babar |
|
527 | + babar | |
528 | This is a noop statement so that |
|
528 | This is a noop statement so that | |
529 | this test is still more bytes than success. |
|
529 | this test is still more bytes than success. | |
530 | pad pad pad pad............................................................ |
|
530 | pad pad pad pad............................................................ | |
531 |
|
531 | |||
532 | ERROR: test-failure.t output changed |
|
532 | ERROR: test-failure.t output changed | |
533 | ! |
|
533 | ! | |
534 | Failed test-failure.t: output changed |
|
534 | Failed test-failure.t: output changed | |
535 | # Ran 2 tests, 1 skipped, 1 failed. |
|
535 | # Ran 2 tests, 1 skipped, 1 failed. | |
536 | python hash seed: * (glob) |
|
536 | python hash seed: * (glob) | |
537 | [1] |
|
537 | [1] | |
538 |
|
538 | |||
539 | Selecting Tests To Run |
|
539 | Selecting Tests To Run | |
540 | ====================== |
|
540 | ====================== | |
541 |
|
541 | |||
542 | successful |
|
542 | successful | |
543 |
|
543 | |||
544 | $ rt test-success.t |
|
544 | $ rt test-success.t | |
545 | running 1 tests using 1 parallel processes |
|
545 | running 1 tests using 1 parallel processes | |
546 | . |
|
546 | . | |
547 | # Ran 1 tests, 0 skipped, 0 failed. |
|
547 | # Ran 1 tests, 0 skipped, 0 failed. | |
548 |
|
548 | |||
549 | success w/ keyword |
|
549 | success w/ keyword | |
550 | $ rt -k xyzzy |
|
550 | $ rt -k xyzzy | |
551 | running 2 tests using 1 parallel processes |
|
551 | running 2 tests using 1 parallel processes | |
552 | . |
|
552 | . | |
553 | # Ran 2 tests, 1 skipped, 0 failed. |
|
553 | # Ran 2 tests, 1 skipped, 0 failed. | |
554 |
|
554 | |||
555 | failed |
|
555 | failed | |
556 |
|
556 | |||
557 | $ rt test-failure.t |
|
557 | $ rt test-failure.t | |
558 | running 1 tests using 1 parallel processes |
|
558 | running 1 tests using 1 parallel processes | |
559 |
|
559 | |||
560 | --- $TESTTMP/test-failure.t |
|
560 | --- $TESTTMP/test-failure.t | |
561 | +++ $TESTTMP/test-failure.t.err |
|
561 | +++ $TESTTMP/test-failure.t.err | |
562 | @@ -1,5 +1,5 @@ |
|
562 | @@ -1,5 +1,5 @@ | |
563 | $ echo babar |
|
563 | $ echo babar | |
564 | - rataxes |
|
564 | - rataxes | |
565 | + babar |
|
565 | + babar | |
566 | This is a noop statement so that |
|
566 | This is a noop statement so that | |
567 | this test is still more bytes than success. |
|
567 | this test is still more bytes than success. | |
568 | pad pad pad pad............................................................ |
|
568 | pad pad pad pad............................................................ | |
569 |
|
569 | |||
570 | ERROR: test-failure.t output changed |
|
570 | ERROR: test-failure.t output changed | |
571 | ! |
|
571 | ! | |
572 | Failed test-failure.t: output changed |
|
572 | Failed test-failure.t: output changed | |
573 | # Ran 1 tests, 0 skipped, 1 failed. |
|
573 | # Ran 1 tests, 0 skipped, 1 failed. | |
574 | python hash seed: * (glob) |
|
574 | python hash seed: * (glob) | |
575 | [1] |
|
575 | [1] | |
576 |
|
576 | |||
577 | failure w/ keyword |
|
577 | failure w/ keyword | |
578 | $ rt -k rataxes |
|
578 | $ rt -k rataxes | |
579 | running 2 tests using 1 parallel processes |
|
579 | running 2 tests using 1 parallel processes | |
580 |
|
580 | |||
581 | --- $TESTTMP/test-failure.t |
|
581 | --- $TESTTMP/test-failure.t | |
582 | +++ $TESTTMP/test-failure.t.err |
|
582 | +++ $TESTTMP/test-failure.t.err | |
583 | @@ -1,5 +1,5 @@ |
|
583 | @@ -1,5 +1,5 @@ | |
584 | $ echo babar |
|
584 | $ echo babar | |
585 | - rataxes |
|
585 | - rataxes | |
586 | + babar |
|
586 | + babar | |
587 | This is a noop statement so that |
|
587 | This is a noop statement so that | |
588 | this test is still more bytes than success. |
|
588 | this test is still more bytes than success. | |
589 | pad pad pad pad............................................................ |
|
589 | pad pad pad pad............................................................ | |
590 |
|
590 | |||
591 | ERROR: test-failure.t output changed |
|
591 | ERROR: test-failure.t output changed | |
592 | ! |
|
592 | ! | |
593 | Failed test-failure.t: output changed |
|
593 | Failed test-failure.t: output changed | |
594 | # Ran 2 tests, 1 skipped, 1 failed. |
|
594 | # Ran 2 tests, 1 skipped, 1 failed. | |
595 | python hash seed: * (glob) |
|
595 | python hash seed: * (glob) | |
596 | [1] |
|
596 | [1] | |
597 |
|
597 | |||
598 | Verify that when a process fails to start we show a useful message |
|
598 | Verify that when a process fails to start we show a useful message | |
599 | ================================================================== |
|
599 | ================================================================== | |
600 |
|
600 | |||
601 | $ cat > test-serve-fail.t <<EOF |
|
601 | $ cat > test-serve-fail.t <<EOF | |
602 | > $ echo 'abort: child process failed to start blah' |
|
602 | > $ echo 'abort: child process failed to start blah' | |
603 | > EOF |
|
603 | > EOF | |
604 | $ rt test-serve-fail.t |
|
604 | $ rt test-serve-fail.t | |
605 | running 1 tests using 1 parallel processes |
|
605 | running 1 tests using 1 parallel processes | |
606 |
|
606 | |||
607 | --- $TESTTMP/test-serve-fail.t |
|
607 | --- $TESTTMP/test-serve-fail.t | |
608 | +++ $TESTTMP/test-serve-fail.t.err |
|
608 | +++ $TESTTMP/test-serve-fail.t.err | |
609 | @@ -1* +1,2 @@ (glob) |
|
609 | @@ -1* +1,2 @@ (glob) | |
610 | $ echo 'abort: child process failed to start blah' |
|
610 | $ echo 'abort: child process failed to start blah' | |
611 | + abort: child process failed to start blah |
|
611 | + abort: child process failed to start blah | |
612 |
|
612 | |||
613 | ERROR: test-serve-fail.t output changed |
|
613 | ERROR: test-serve-fail.t output changed | |
614 | ! |
|
614 | ! | |
615 | Failed test-serve-fail.t: server failed to start (HGPORT=*) (glob) |
|
615 | Failed test-serve-fail.t: server failed to start (HGPORT=*) (glob) | |
616 | # Ran 1 tests, 0 skipped, 1 failed. |
|
616 | # Ran 1 tests, 0 skipped, 1 failed. | |
617 | python hash seed: * (glob) |
|
617 | python hash seed: * (glob) | |
618 | [1] |
|
618 | [1] | |
619 | $ rm test-serve-fail.t |
|
619 | $ rm test-serve-fail.t | |
620 |
|
620 | |||
621 | Verify that we can try other ports |
|
621 | Verify that we can try other ports | |
622 | =================================== |
|
622 | =================================== | |
623 |
|
623 | |||
624 | Extensions aren't inherited by the invoked run-tests.py. An extension |
|
624 | Extensions aren't inherited by the invoked run-tests.py. An extension | |
625 | introducing a repository requirement could cause this to fail. So we force |
|
625 | introducing a repository requirement could cause this to fail. So we force | |
626 | HGRCPATH to get a clean environment. |
|
626 | HGRCPATH to get a clean environment. | |
627 |
|
627 | |||
628 | $ HGRCPATH= hg init inuse |
|
628 | $ HGRCPATH= hg init inuse | |
629 | $ hg serve -R inuse -p $HGPORT -d --pid-file=blocks.pid |
|
629 | $ hg serve -R inuse -p $HGPORT -d --pid-file=blocks.pid | |
630 | $ cat blocks.pid >> $DAEMON_PIDS |
|
630 | $ cat blocks.pid >> $DAEMON_PIDS | |
631 | $ cat > test-serve-inuse.t <<EOF |
|
631 | $ cat > test-serve-inuse.t <<EOF | |
632 | > $ hg serve -R `pwd`/inuse -p \$HGPORT -d --pid-file=hg.pid |
|
632 | > $ hg serve -R `pwd`/inuse -p \$HGPORT -d --pid-file=hg.pid | |
633 | > $ cat hg.pid >> \$DAEMON_PIDS |
|
633 | > $ cat hg.pid >> \$DAEMON_PIDS | |
634 | > EOF |
|
634 | > EOF | |
635 | $ rt test-serve-inuse.t |
|
635 | $ rt test-serve-inuse.t | |
636 | running 1 tests using 1 parallel processes |
|
636 | running 1 tests using 1 parallel processes | |
637 | . |
|
637 | . | |
638 | # Ran 1 tests, 0 skipped, 0 failed. |
|
638 | # Ran 1 tests, 0 skipped, 0 failed. | |
639 | $ rm test-serve-inuse.t |
|
639 | $ rm test-serve-inuse.t | |
640 | $ killdaemons.py $DAEMON_PIDS |
|
640 | $ killdaemons.py $DAEMON_PIDS | |
641 |
|
641 | |||
642 | Running In Debug Mode |
|
642 | Running In Debug Mode | |
643 | ====================== |
|
643 | ====================== | |
644 |
|
644 | |||
645 | $ rt --debug 2>&1 | grep -v pwd |
|
645 | $ rt --debug 2>&1 | grep -v pwd | |
646 | running 2 tests using 1 parallel processes |
|
646 | running 2 tests using 1 parallel processes | |
647 | + alias hg=hg.exe (windows !) |
|
647 | + alias hg=hg.exe (windows !) | |
648 | + echo *SALT* 0 0 (glob) |
|
648 | + echo *SALT* 0 0 (glob) | |
649 | *SALT* 0 0 (glob) |
|
649 | *SALT* 0 0 (glob) | |
650 | + echo babar |
|
650 | + echo babar | |
651 | babar |
|
651 | babar | |
652 | + echo *SALT* 10 0 (glob) |
|
652 | + echo *SALT* 10 0 (glob) | |
653 | *SALT* 10 0 (glob) |
|
653 | *SALT* 10 0 (glob) | |
654 | .+ alias hg=hg.exe (windows !) |
|
654 | .+ alias hg=hg.exe (windows !) | |
655 | *+ echo *SALT* 0 0 (glob) |
|
655 | *+ echo *SALT* 0 0 (glob) | |
656 | *SALT* 0 0 (glob) |
|
656 | *SALT* 0 0 (glob) | |
657 | + echo babar |
|
657 | + echo babar | |
658 | babar |
|
658 | babar | |
659 | + echo *SALT* 2 0 (glob) |
|
659 | + echo *SALT* 2 0 (glob) | |
660 | *SALT* 2 0 (glob) |
|
660 | *SALT* 2 0 (glob) | |
661 | + echo xyzzy |
|
661 | + echo xyzzy | |
662 | xyzzy |
|
662 | xyzzy | |
663 | + echo *SALT* 9 0 (glob) |
|
663 | + echo *SALT* 9 0 (glob) | |
664 | *SALT* 9 0 (glob) |
|
664 | *SALT* 9 0 (glob) | |
665 | + printf *abc\ndef\nxyz\n* (glob) |
|
665 | + printf *abc\ndef\nxyz\n* (glob) | |
666 | abc |
|
666 | abc | |
667 | def |
|
667 | def | |
668 | xyz |
|
668 | xyz | |
669 | + echo *SALT* 15 0 (glob) |
|
669 | + echo *SALT* 15 0 (glob) | |
670 | *SALT* 15 0 (glob) |
|
670 | *SALT* 15 0 (glob) | |
671 | + printf *zyx\nwvu\ntsr\n* (glob) |
|
671 | + printf *zyx\nwvu\ntsr\n* (glob) | |
672 | zyx |
|
672 | zyx | |
673 | wvu |
|
673 | wvu | |
674 | tsr |
|
674 | tsr | |
675 | + echo *SALT* 22 0 (glob) |
|
675 | + echo *SALT* 22 0 (glob) | |
676 | *SALT* 22 0 (glob) |
|
676 | *SALT* 22 0 (glob) | |
677 | . |
|
677 | . | |
678 | # Ran 2 tests, 0 skipped, 0 failed. |
|
678 | # Ran 2 tests, 0 skipped, 0 failed. | |
679 |
|
679 | |||
680 | Parallel runs |
|
680 | Parallel runs | |
681 | ============== |
|
681 | ============== | |
682 |
|
682 | |||
683 | (duplicate the failing test to get predictable output) |
|
683 | (duplicate the failing test to get predictable output) | |
684 | $ cp test-failure.t test-failure-copy.t |
|
684 | $ cp test-failure.t test-failure-copy.t | |
685 |
|
685 | |||
686 | $ rt --jobs 2 test-failure*.t -n |
|
686 | $ rt --jobs 2 test-failure*.t -n | |
687 | running 2 tests using 2 parallel processes |
|
687 | running 2 tests using 2 parallel processes | |
688 | !! |
|
688 | !! | |
689 | Failed test-failure*.t: output changed (glob) |
|
689 | Failed test-failure*.t: output changed (glob) | |
690 | Failed test-failure*.t: output changed (glob) |
|
690 | Failed test-failure*.t: output changed (glob) | |
691 | # Ran 2 tests, 0 skipped, 2 failed. |
|
691 | # Ran 2 tests, 0 skipped, 2 failed. | |
692 | python hash seed: * (glob) |
|
692 | python hash seed: * (glob) | |
693 | [1] |
|
693 | [1] | |
694 |
|
694 | |||
695 | failures in parallel with --first should only print one failure |
|
695 | failures in parallel with --first should only print one failure | |
696 | $ rt --jobs 2 --first test-failure*.t |
|
696 | $ rt --jobs 2 --first test-failure*.t | |
697 | running 2 tests using 2 parallel processes |
|
697 | running 2 tests using 2 parallel processes | |
698 |
|
698 | |||
699 | --- $TESTTMP/test-failure*.t (glob) |
|
699 | --- $TESTTMP/test-failure*.t (glob) | |
700 | +++ $TESTTMP/test-failure*.t.err (glob) |
|
700 | +++ $TESTTMP/test-failure*.t.err (glob) | |
701 | @@ -1,5 +1,5 @@ |
|
701 | @@ -1,5 +1,5 @@ | |
702 | $ echo babar |
|
702 | $ echo babar | |
703 | - rataxes |
|
703 | - rataxes | |
704 | + babar |
|
704 | + babar | |
705 | This is a noop statement so that |
|
705 | This is a noop statement so that | |
706 | this test is still more bytes than success. |
|
706 | this test is still more bytes than success. | |
707 | pad pad pad pad............................................................ |
|
707 | pad pad pad pad............................................................ | |
708 |
|
708 | |||
709 | Failed test-failure*.t: output changed (glob) |
|
709 | Failed test-failure*.t: output changed (glob) | |
710 | Failed test-failure*.t: output changed (glob) |
|
710 | Failed test-failure*.t: output changed (glob) | |
711 | # Ran 2 tests, 0 skipped, 2 failed. |
|
711 | # Ran 2 tests, 0 skipped, 2 failed. | |
712 | python hash seed: * (glob) |
|
712 | python hash seed: * (glob) | |
713 | [1] |
|
713 | [1] | |
714 |
|
714 | |||
715 |
|
715 | |||
716 | (delete the duplicated test file) |
|
716 | (delete the duplicated test file) | |
717 | $ rm test-failure-copy.t |
|
717 | $ rm test-failure-copy.t | |
718 |
|
718 | |||
719 | multiple runs per test should be parallelized |
|
719 | multiple runs per test should be parallelized | |
720 |
|
720 | |||
721 | $ rt --jobs 2 --runs-per-test 2 test-success.t |
|
721 | $ rt --jobs 2 --runs-per-test 2 test-success.t | |
722 | running 2 tests using 2 parallel processes |
|
722 | running 2 tests using 2 parallel processes | |
723 | .. |
|
723 | .. | |
724 | # Ran 2 tests, 0 skipped, 0 failed. |
|
724 | # Ran 2 tests, 0 skipped, 0 failed. | |
725 |
|
725 | |||
726 | Interactive run |
|
726 | Interactive run | |
727 | =============== |
|
727 | =============== | |
728 |
|
728 | |||
729 | (backup the failing test) |
|
729 | (backup the failing test) | |
730 | $ cp test-failure.t backup |
|
730 | $ cp test-failure.t backup | |
731 |
|
731 | |||
732 | Refuse the fix |
|
732 | Refuse the fix | |
733 |
|
733 | |||
734 | $ echo 'n' | rt -i |
|
734 | $ echo 'n' | rt -i | |
735 | running 2 tests using 1 parallel processes |
|
735 | running 2 tests using 1 parallel processes | |
736 |
|
736 | |||
737 | --- $TESTTMP/test-failure.t |
|
737 | --- $TESTTMP/test-failure.t | |
738 | +++ $TESTTMP/test-failure.t.err |
|
738 | +++ $TESTTMP/test-failure.t.err | |
739 | @@ -1,5 +1,5 @@ |
|
739 | @@ -1,5 +1,5 @@ | |
740 | $ echo babar |
|
740 | $ echo babar | |
741 | - rataxes |
|
741 | - rataxes | |
742 | + babar |
|
742 | + babar | |
743 | This is a noop statement so that |
|
743 | This is a noop statement so that | |
744 | this test is still more bytes than success. |
|
744 | this test is still more bytes than success. | |
745 | pad pad pad pad............................................................ |
|
745 | pad pad pad pad............................................................ | |
746 | Accept this change? [n] |
|
746 | Accept this change? [n] | |
747 | ERROR: test-failure.t output changed |
|
747 | ERROR: test-failure.t output changed | |
748 | !. |
|
748 | !. | |
749 | Failed test-failure.t: output changed |
|
749 | Failed test-failure.t: output changed | |
750 | # Ran 2 tests, 0 skipped, 1 failed. |
|
750 | # Ran 2 tests, 0 skipped, 1 failed. | |
751 | python hash seed: * (glob) |
|
751 | python hash seed: * (glob) | |
752 | [1] |
|
752 | [1] | |
753 |
|
753 | |||
754 | $ cat test-failure.t |
|
754 | $ cat test-failure.t | |
755 | $ echo babar |
|
755 | $ echo babar | |
756 | rataxes |
|
756 | rataxes | |
757 | This is a noop statement so that |
|
757 | This is a noop statement so that | |
758 | this test is still more bytes than success. |
|
758 | this test is still more bytes than success. | |
759 | pad pad pad pad............................................................ |
|
759 | pad pad pad pad............................................................ | |
760 | pad pad pad pad............................................................ |
|
760 | pad pad pad pad............................................................ | |
761 | pad pad pad pad............................................................ |
|
761 | pad pad pad pad............................................................ | |
762 | pad pad pad pad............................................................ |
|
762 | pad pad pad pad............................................................ | |
763 | pad pad pad pad............................................................ |
|
763 | pad pad pad pad............................................................ | |
764 | pad pad pad pad............................................................ |
|
764 | pad pad pad pad............................................................ | |
765 |
|
765 | |||
766 | Interactive with custom view |
|
766 | Interactive with custom view | |
767 |
|
767 | |||
768 | $ echo 'n' | rt -i --view echo |
|
768 | $ echo 'n' | rt -i --view echo | |
769 | running 2 tests using 1 parallel processes |
|
769 | running 2 tests using 1 parallel processes | |
770 | $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err |
|
770 | $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err | |
771 | Accept this change? [n]* (glob) |
|
771 | Accept this change? [n]* (glob) | |
772 | ERROR: test-failure.t output changed |
|
772 | ERROR: test-failure.t output changed | |
773 | !. |
|
773 | !. | |
774 | Failed test-failure.t: output changed |
|
774 | Failed test-failure.t: output changed | |
775 | # Ran 2 tests, 0 skipped, 1 failed. |
|
775 | # Ran 2 tests, 0 skipped, 1 failed. | |
776 | python hash seed: * (glob) |
|
776 | python hash seed: * (glob) | |
777 | [1] |
|
777 | [1] | |
778 |
|
778 | |||
779 | View the fix |
|
779 | View the fix | |
780 |
|
780 | |||
781 | $ echo 'y' | rt --view echo |
|
781 | $ echo 'y' | rt --view echo | |
782 | running 2 tests using 1 parallel processes |
|
782 | running 2 tests using 1 parallel processes | |
783 | $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err |
|
783 | $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err | |
784 |
|
784 | |||
785 | ERROR: test-failure.t output changed |
|
785 | ERROR: test-failure.t output changed | |
786 | !. |
|
786 | !. | |
787 | Failed test-failure.t: output changed |
|
787 | Failed test-failure.t: output changed | |
788 | # Ran 2 tests, 0 skipped, 1 failed. |
|
788 | # Ran 2 tests, 0 skipped, 1 failed. | |
789 | python hash seed: * (glob) |
|
789 | python hash seed: * (glob) | |
790 | [1] |
|
790 | [1] | |
791 |
|
791 | |||
792 | Accept the fix |
|
792 | Accept the fix | |
793 |
|
793 | |||
794 | $ cat >> test-failure.t <<EOF |
|
794 | $ cat >> test-failure.t <<EOF | |
795 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' |
|
795 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' | |
796 | > saved backup bundle to \$TESTTMP/foo.hg |
|
796 | > saved backup bundle to \$TESTTMP/foo.hg | |
797 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' |
|
797 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' | |
798 | > saved backup bundle to $TESTTMP\\foo.hg |
|
798 | > saved backup bundle to $TESTTMP\\foo.hg | |
799 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' |
|
799 | > $ echo 'saved backup bundle to \$TESTTMP/foo.hg' | |
800 | > saved backup bundle to \$TESTTMP/*.hg (glob) |
|
800 | > saved backup bundle to \$TESTTMP/*.hg (glob) | |
801 | > EOF |
|
801 | > EOF | |
802 | $ echo 'y' | rt -i 2>&1 |
|
802 | $ echo 'y' | rt -i 2>&1 | |
803 | running 2 tests using 1 parallel processes |
|
803 | running 2 tests using 1 parallel processes | |
804 |
|
804 | |||
805 | --- $TESTTMP/test-failure.t |
|
805 | --- $TESTTMP/test-failure.t | |
806 | +++ $TESTTMP/test-failure.t.err |
|
806 | +++ $TESTTMP/test-failure.t.err | |
807 | @@ -1,5 +1,5 @@ |
|
807 | @@ -1,5 +1,5 @@ | |
808 | $ echo babar |
|
808 | $ echo babar | |
809 | - rataxes |
|
809 | - rataxes | |
810 | + babar |
|
810 | + babar | |
811 | This is a noop statement so that |
|
811 | This is a noop statement so that | |
812 | this test is still more bytes than success. |
|
812 | this test is still more bytes than success. | |
813 | pad pad pad pad............................................................ |
|
813 | pad pad pad pad............................................................ | |
814 | @@ -11,6 +11,6 @@ |
|
814 | @@ -11,6 +11,6 @@ | |
815 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
815 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
816 | saved backup bundle to $TESTTMP/foo.hg |
|
816 | saved backup bundle to $TESTTMP/foo.hg | |
817 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
817 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
818 | - saved backup bundle to $TESTTMP\foo.hg |
|
818 | - saved backup bundle to $TESTTMP\foo.hg | |
819 | + saved backup bundle to $TESTTMP/foo.hg |
|
819 | + saved backup bundle to $TESTTMP/foo.hg | |
820 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
820 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
821 | saved backup bundle to $TESTTMP/*.hg (glob) |
|
821 | saved backup bundle to $TESTTMP/*.hg (glob) | |
822 | Accept this change? [n] .. |
|
822 | Accept this change? [n] .. | |
823 | # Ran 2 tests, 0 skipped, 0 failed. |
|
823 | # Ran 2 tests, 0 skipped, 0 failed. | |
824 |
|
824 | |||
825 | $ sed -e 's,(glob)$,&<,g' test-failure.t |
|
825 | $ sed -e 's,(glob)$,&<,g' test-failure.t | |
826 | $ echo babar |
|
826 | $ echo babar | |
827 | babar |
|
827 | babar | |
828 | This is a noop statement so that |
|
828 | This is a noop statement so that | |
829 | this test is still more bytes than success. |
|
829 | this test is still more bytes than success. | |
830 | pad pad pad pad............................................................ |
|
830 | pad pad pad pad............................................................ | |
831 | pad pad pad pad............................................................ |
|
831 | pad pad pad pad............................................................ | |
832 | pad pad pad pad............................................................ |
|
832 | pad pad pad pad............................................................ | |
833 | pad pad pad pad............................................................ |
|
833 | pad pad pad pad............................................................ | |
834 | pad pad pad pad............................................................ |
|
834 | pad pad pad pad............................................................ | |
835 | pad pad pad pad............................................................ |
|
835 | pad pad pad pad............................................................ | |
836 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
836 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
837 | saved backup bundle to $TESTTMP/foo.hg |
|
837 | saved backup bundle to $TESTTMP/foo.hg | |
838 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
838 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
839 | saved backup bundle to $TESTTMP/foo.hg |
|
839 | saved backup bundle to $TESTTMP/foo.hg | |
840 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' |
|
840 | $ echo 'saved backup bundle to $TESTTMP/foo.hg' | |
841 | saved backup bundle to $TESTTMP/*.hg (glob)< |
|
841 | saved backup bundle to $TESTTMP/*.hg (glob)< | |
842 |
|
842 | |||
843 | Race condition - test file was modified when test is running |
|
843 | Race condition - test file was modified when test is running | |
844 |
|
844 | |||
845 | $ TESTRACEDIR=`pwd` |
|
845 | $ TESTRACEDIR=`pwd` | |
846 | $ export TESTRACEDIR |
|
846 | $ export TESTRACEDIR | |
847 | $ cat > test-race.t <<EOF |
|
847 | $ cat > test-race.t <<EOF | |
848 | > $ echo 1 |
|
848 | > $ echo 1 | |
849 | > $ echo "# a new line" >> $TESTRACEDIR/test-race.t |
|
849 | > $ echo "# a new line" >> $TESTRACEDIR/test-race.t | |
850 | > EOF |
|
850 | > EOF | |
851 |
|
851 | |||
852 | $ rt -i test-race.t |
|
852 | $ rt -i test-race.t | |
853 | running 1 tests using 1 parallel processes |
|
853 | running 1 tests using 1 parallel processes | |
854 |
|
854 | |||
855 | --- $TESTTMP/test-race.t |
|
855 | --- $TESTTMP/test-race.t | |
856 | +++ $TESTTMP/test-race.t.err |
|
856 | +++ $TESTTMP/test-race.t.err | |
857 | @@ -1,2 +1,3 @@ |
|
857 | @@ -1,2 +1,3 @@ | |
858 | $ echo 1 |
|
858 | $ echo 1 | |
859 | + 1 |
|
859 | + 1 | |
860 | $ echo "# a new line" >> $TESTTMP/test-race.t |
|
860 | $ echo "# a new line" >> $TESTTMP/test-race.t | |
861 | Reference output has changed (run again to prompt changes) |
|
861 | Reference output has changed (run again to prompt changes) | |
862 | ERROR: test-race.t output changed |
|
862 | ERROR: test-race.t output changed | |
863 | ! |
|
863 | ! | |
864 | Failed test-race.t: output changed |
|
864 | Failed test-race.t: output changed | |
865 | # Ran 1 tests, 0 skipped, 1 failed. |
|
865 | # Ran 1 tests, 0 skipped, 1 failed. | |
866 | python hash seed: * (glob) |
|
866 | python hash seed: * (glob) | |
867 | [1] |
|
867 | [1] | |
868 |
|
868 | |||
869 | $ rm test-race.t |
|
869 | $ rm test-race.t | |
870 |
|
870 | |||
871 | When "#testcases" is used in .t files |
|
871 | When "#testcases" is used in .t files | |
872 |
|
872 | |||
873 | $ cat >> test-cases.t <<EOF |
|
873 | $ cat >> test-cases.t <<EOF | |
874 | > #testcases a b |
|
874 | > #testcases a b | |
875 | > #if a |
|
875 | > #if a | |
876 | > $ echo 1 |
|
876 | > $ echo 1 | |
877 | > #endif |
|
877 | > #endif | |
878 | > #if b |
|
878 | > #if b | |
879 | > $ echo 2 |
|
879 | > $ echo 2 | |
880 | > #endif |
|
880 | > #endif | |
881 | > EOF |
|
881 | > EOF | |
882 |
|
882 | |||
883 | $ cat <<EOF | rt -i test-cases.t 2>&1 |
|
883 | $ cat <<EOF | rt -i test-cases.t 2>&1 | |
884 | > y |
|
884 | > y | |
885 | > y |
|
885 | > y | |
886 | > EOF |
|
886 | > EOF | |
887 | running 2 tests using 1 parallel processes |
|
887 | running 2 tests using 1 parallel processes | |
888 |
|
888 | |||
889 | --- $TESTTMP/test-cases.t |
|
889 | --- $TESTTMP/test-cases.t | |
890 | +++ $TESTTMP/test-cases.t#a.err |
|
890 | +++ $TESTTMP/test-cases.t#a.err | |
891 | @@ -1,6 +1,7 @@ |
|
891 | @@ -1,6 +1,7 @@ | |
892 | #testcases a b |
|
892 | #testcases a b | |
893 | #if a |
|
893 | #if a | |
894 | $ echo 1 |
|
894 | $ echo 1 | |
895 | + 1 |
|
895 | + 1 | |
896 | #endif |
|
896 | #endif | |
897 | #if b |
|
897 | #if b | |
898 | $ echo 2 |
|
898 | $ echo 2 | |
899 | Accept this change? [n] . |
|
899 | Accept this change? [n] . | |
900 | --- $TESTTMP/test-cases.t |
|
900 | --- $TESTTMP/test-cases.t | |
901 | +++ $TESTTMP/test-cases.t#b.err |
|
901 | +++ $TESTTMP/test-cases.t#b.err | |
902 | @@ -5,4 +5,5 @@ |
|
902 | @@ -5,4 +5,5 @@ | |
903 | #endif |
|
903 | #endif | |
904 | #if b |
|
904 | #if b | |
905 | $ echo 2 |
|
905 | $ echo 2 | |
906 | + 2 |
|
906 | + 2 | |
907 | #endif |
|
907 | #endif | |
908 | Accept this change? [n] . |
|
908 | Accept this change? [n] . | |
909 | # Ran 2 tests, 0 skipped, 0 failed. |
|
909 | # Ran 2 tests, 0 skipped, 0 failed. | |
910 |
|
910 | |||
911 | $ cat test-cases.t |
|
911 | $ cat test-cases.t | |
912 | #testcases a b |
|
912 | #testcases a b | |
913 | #if a |
|
913 | #if a | |
914 | $ echo 1 |
|
914 | $ echo 1 | |
915 | 1 |
|
915 | 1 | |
916 | #endif |
|
916 | #endif | |
917 | #if b |
|
917 | #if b | |
918 | $ echo 2 |
|
918 | $ echo 2 | |
919 | 2 |
|
919 | 2 | |
920 | #endif |
|
920 | #endif | |
921 |
|
921 | |||
922 | $ cat >> test-cases.t <<'EOF' |
|
922 | $ cat >> test-cases.t <<'EOF' | |
923 | > #if a |
|
923 | > #if a | |
924 | > $ NAME=A |
|
924 | > $ NAME=A | |
925 | > #else |
|
925 | > #else | |
926 | > $ NAME=B |
|
926 | > $ NAME=B | |
927 | > #endif |
|
927 | > #endif | |
928 | > $ echo $NAME |
|
928 | > $ echo $NAME | |
929 | > A (a !) |
|
929 | > A (a !) | |
930 | > B (b !) |
|
930 | > B (b !) | |
931 | > EOF |
|
931 | > EOF | |
932 | $ rt test-cases.t |
|
932 | $ rt test-cases.t | |
933 | running 2 tests using 1 parallel processes |
|
933 | running 2 tests using 1 parallel processes | |
934 | .. |
|
934 | .. | |
935 | # Ran 2 tests, 0 skipped, 0 failed. |
|
935 | # Ran 2 tests, 0 skipped, 0 failed. | |
936 |
|
936 | |||
937 | When using multiple dimensions of "#testcases" in .t files |
|
937 | When using multiple dimensions of "#testcases" in .t files | |
938 |
|
938 | |||
939 | $ cat > test-cases.t <<'EOF' |
|
939 | $ cat > test-cases.t <<'EOF' | |
940 | > #testcases a b |
|
940 | > #testcases a b | |
941 | > #testcases c d |
|
941 | > #testcases c d | |
942 | > #if a d |
|
942 | > #if a d | |
943 | > $ echo $TESTCASE |
|
943 | > $ echo $TESTCASE | |
944 | > a#d |
|
944 | > a#d | |
945 | > #endif |
|
945 | > #endif | |
946 | > #if b c |
|
946 | > #if b c | |
947 | > $ echo yes |
|
947 | > $ echo yes | |
948 | > no |
|
948 | > no | |
949 | > #endif |
|
949 | > #endif | |
950 | > EOF |
|
950 | > EOF | |
951 | $ rt test-cases.t |
|
951 | $ rt test-cases.t | |
952 | running 4 tests using 1 parallel processes |
|
952 | running 4 tests using 1 parallel processes | |
953 | .. |
|
953 | .. | |
954 | --- $TESTTMP/test-cases.t |
|
954 | --- $TESTTMP/test-cases.t | |
955 | +++ $TESTTMP/test-cases.t#b#c.err |
|
955 | +++ $TESTTMP/test-cases.t#b#c.err | |
956 | @@ -6,5 +6,5 @@ |
|
956 | @@ -6,5 +6,5 @@ | |
957 | #endif |
|
957 | #endif | |
958 | #if b c |
|
958 | #if b c | |
959 | $ echo yes |
|
959 | $ echo yes | |
960 | - no |
|
960 | - no | |
961 | + yes |
|
961 | + yes | |
962 | #endif |
|
962 | #endif | |
963 |
|
963 | |||
964 | ERROR: test-cases.t#b#c output changed |
|
964 | ERROR: test-cases.t#b#c output changed | |
965 | !. |
|
965 | !. | |
966 | Failed test-cases.t#b#c: output changed |
|
966 | Failed test-cases.t#b#c: output changed | |
967 | # Ran 4 tests, 0 skipped, 1 failed. |
|
967 | # Ran 4 tests, 0 skipped, 1 failed. | |
968 | python hash seed: * (glob) |
|
968 | python hash seed: * (glob) | |
969 | [1] |
|
969 | [1] | |
970 |
|
970 | |||
971 | $ rm test-cases.t#b#c.err |
|
971 | $ rm test-cases.t#b#c.err | |
972 | $ rm test-cases.t |
|
972 | $ rm test-cases.t | |
973 |
|
973 | |||
974 | (reinstall) |
|
974 | (reinstall) | |
975 | $ mv backup test-failure.t |
|
975 | $ mv backup test-failure.t | |
976 |
|
976 | |||
977 | No Diff |
|
977 | No Diff | |
978 | =============== |
|
978 | =============== | |
979 |
|
979 | |||
980 | $ rt --nodiff |
|
980 | $ rt --nodiff | |
981 | running 2 tests using 1 parallel processes |
|
981 | running 2 tests using 1 parallel processes | |
982 | !. |
|
982 | !. | |
983 | Failed test-failure.t: output changed |
|
983 | Failed test-failure.t: output changed | |
984 | # Ran 2 tests, 0 skipped, 1 failed. |
|
984 | # Ran 2 tests, 0 skipped, 1 failed. | |
985 | python hash seed: * (glob) |
|
985 | python hash seed: * (glob) | |
986 | [1] |
|
986 | [1] | |
987 |
|
987 | |||
988 | test --tmpdir support |
|
988 | test --tmpdir support | |
989 | $ rt --tmpdir=$TESTTMP/keep test-success.t |
|
989 | $ rt --tmpdir=$TESTTMP/keep test-success.t | |
990 | running 1 tests using 1 parallel processes |
|
990 | running 1 tests using 1 parallel processes | |
991 |
|
991 | |||
992 | Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t |
|
992 | Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t | |
993 | Keeping threadtmp dir: $TESTTMP/keep/child1 |
|
993 | Keeping threadtmp dir: $TESTTMP/keep/child1 | |
994 | . |
|
994 | . | |
995 | # Ran 1 tests, 0 skipped, 0 failed. |
|
995 | # Ran 1 tests, 0 skipped, 0 failed. | |
996 |
|
996 | |||
997 | timeouts |
|
997 | timeouts | |
998 | ======== |
|
998 | ======== | |
999 | $ cat > test-timeout.t <<EOF |
|
999 | $ cat > test-timeout.t <<EOF | |
1000 | > $ sleep 2 |
|
1000 | > $ sleep 2 | |
1001 | > $ echo pass |
|
1001 | > $ echo pass | |
1002 | > pass |
|
1002 | > pass | |
1003 | > EOF |
|
1003 | > EOF | |
1004 | > echo '#require slow' > test-slow-timeout.t |
|
1004 | > echo '#require slow' > test-slow-timeout.t | |
1005 | > cat test-timeout.t >> test-slow-timeout.t |
|
1005 | > cat test-timeout.t >> test-slow-timeout.t | |
1006 | $ rt --timeout=1 --slowtimeout=3 test-timeout.t test-slow-timeout.t |
|
1006 | $ rt --timeout=1 --slowtimeout=3 test-timeout.t test-slow-timeout.t | |
1007 | running 2 tests using 1 parallel processes |
|
1007 | running 2 tests using 1 parallel processes | |
1008 | st |
|
1008 | st | |
1009 | Skipped test-slow-timeout.t: missing feature: allow slow tests (use --allow-slow-tests) |
|
1009 | Skipped test-slow-timeout.t: missing feature: allow slow tests (use --allow-slow-tests) | |
1010 | Failed test-timeout.t: timed out |
|
1010 | Failed test-timeout.t: timed out | |
1011 | # Ran 1 tests, 1 skipped, 1 failed. |
|
1011 | # Ran 1 tests, 1 skipped, 1 failed. | |
1012 | python hash seed: * (glob) |
|
1012 | python hash seed: * (glob) | |
1013 | [1] |
|
1013 | [1] | |
1014 | $ rt --timeout=1 --slowtimeout=3 \ |
|
1014 | $ rt --timeout=1 --slowtimeout=3 \ | |
1015 | > test-timeout.t test-slow-timeout.t --allow-slow-tests |
|
1015 | > test-timeout.t test-slow-timeout.t --allow-slow-tests | |
1016 | running 2 tests using 1 parallel processes |
|
1016 | running 2 tests using 1 parallel processes | |
1017 | .t |
|
1017 | .t | |
1018 | Failed test-timeout.t: timed out |
|
1018 | Failed test-timeout.t: timed out | |
1019 | # Ran 2 tests, 0 skipped, 1 failed. |
|
1019 | # Ran 2 tests, 0 skipped, 1 failed. | |
1020 | python hash seed: * (glob) |
|
1020 | python hash seed: * (glob) | |
1021 | [1] |
|
1021 | [1] | |
1022 | $ rm test-timeout.t test-slow-timeout.t |
|
1022 | $ rm test-timeout.t test-slow-timeout.t | |
1023 |
|
1023 | |||
1024 | test for --time |
|
1024 | test for --time | |
1025 | ================== |
|
1025 | ================== | |
1026 |
|
1026 | |||
1027 | $ rt test-success.t --time |
|
1027 | $ rt test-success.t --time | |
1028 | running 1 tests using 1 parallel processes |
|
1028 | running 1 tests using 1 parallel processes | |
1029 | . |
|
1029 | . | |
1030 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1030 | # Ran 1 tests, 0 skipped, 0 failed. | |
1031 | # Producing time report |
|
1031 | # Producing time report | |
1032 | start end cuser csys real Test |
|
1032 | start end cuser csys real Test | |
1033 |
\s*[\d\.]{5,8} |
|
1033 | \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} test-success.t (re) | |
1034 |
|
1034 | |||
1035 | test for --time with --job enabled |
|
1035 | test for --time with --job enabled | |
1036 | ==================================== |
|
1036 | ==================================== | |
1037 |
|
1037 | |||
1038 | $ rt test-success.t --time --jobs 2 |
|
1038 | $ rt test-success.t --time --jobs 2 | |
1039 | running 1 tests using 1 parallel processes |
|
1039 | running 1 tests using 1 parallel processes | |
1040 | . |
|
1040 | . | |
1041 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1041 | # Ran 1 tests, 0 skipped, 0 failed. | |
1042 | # Producing time report |
|
1042 | # Producing time report | |
1043 | start end cuser csys real Test |
|
1043 | start end cuser csys real Test | |
1044 |
\s*[\d\.]{5,8} |
|
1044 | \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} \s*[\d\.]{5,8} test-success.t (re) | |
1045 |
|
1045 | |||
1046 | Skips |
|
1046 | Skips | |
1047 | ================ |
|
1047 | ================ | |
1048 | $ cat > test-skip.t <<EOF |
|
1048 | $ cat > test-skip.t <<EOF | |
1049 | > $ echo xyzzy |
|
1049 | > $ echo xyzzy | |
1050 | > #if true |
|
1050 | > #if true | |
1051 | > #require false |
|
1051 | > #require false | |
1052 | > #end |
|
1052 | > #end | |
1053 | > EOF |
|
1053 | > EOF | |
1054 | $ cat > test-noskip.t <<EOF |
|
1054 | $ cat > test-noskip.t <<EOF | |
1055 | > #if false |
|
1055 | > #if false | |
1056 | > #require false |
|
1056 | > #require false | |
1057 | > #endif |
|
1057 | > #endif | |
1058 | > EOF |
|
1058 | > EOF | |
1059 | $ rt --nodiff |
|
1059 | $ rt --nodiff | |
1060 | running 4 tests using 1 parallel processes |
|
1060 | running 4 tests using 1 parallel processes | |
1061 | !.s. |
|
1061 | !.s. | |
1062 | Skipped test-skip.t: missing feature: nail clipper |
|
1062 | Skipped test-skip.t: missing feature: nail clipper | |
1063 | Failed test-failure.t: output changed |
|
1063 | Failed test-failure.t: output changed | |
1064 | # Ran 3 tests, 1 skipped, 1 failed. |
|
1064 | # Ran 3 tests, 1 skipped, 1 failed. | |
1065 | python hash seed: * (glob) |
|
1065 | python hash seed: * (glob) | |
1066 | [1] |
|
1066 | [1] | |
1067 |
|
1067 | |||
1068 | $ rm test-noskip.t |
|
1068 | $ rm test-noskip.t | |
1069 | $ rt --keyword xyzzy |
|
1069 | $ rt --keyword xyzzy | |
1070 | running 3 tests using 1 parallel processes |
|
1070 | running 3 tests using 1 parallel processes | |
1071 | .s |
|
1071 | .s | |
1072 | Skipped test-skip.t: missing feature: nail clipper |
|
1072 | Skipped test-skip.t: missing feature: nail clipper | |
1073 | # Ran 2 tests, 2 skipped, 0 failed. |
|
1073 | # Ran 2 tests, 2 skipped, 0 failed. | |
1074 |
|
1074 | |||
1075 | Skips with xml |
|
1075 | Skips with xml | |
1076 | $ rt --keyword xyzzy \ |
|
1076 | $ rt --keyword xyzzy \ | |
1077 | > --xunit=xunit.xml |
|
1077 | > --xunit=xunit.xml | |
1078 | running 3 tests using 1 parallel processes |
|
1078 | running 3 tests using 1 parallel processes | |
1079 | .s |
|
1079 | .s | |
1080 | Skipped test-skip.t: missing feature: nail clipper |
|
1080 | Skipped test-skip.t: missing feature: nail clipper | |
1081 | # Ran 2 tests, 2 skipped, 0 failed. |
|
1081 | # Ran 2 tests, 2 skipped, 0 failed. | |
1082 | $ cat xunit.xml |
|
1082 | $ cat xunit.xml | |
1083 | <?xml version="1.0" encoding="utf-8"?> |
|
1083 | <?xml version="1.0" encoding="utf-8"?> | |
1084 | <testsuite errors="0" failures="0" name="run-tests" skipped="2" tests="2"> |
|
1084 | <testsuite errors="0" failures="0" name="run-tests" skipped="2" tests="2"> | |
1085 | <testcase name="test-success.t" time="*"/> (glob) |
|
1085 | <testcase name="test-success.t" time="*"/> (glob) | |
1086 | <testcase name="test-skip.t"> |
|
1086 | <testcase name="test-skip.t"> | |
1087 | <skipped> |
|
1087 | <skipped> | |
1088 | <![CDATA[missing feature: nail clipper]]> </skipped> |
|
1088 | <![CDATA[missing feature: nail clipper]]> </skipped> | |
1089 | </testcase> |
|
1089 | </testcase> | |
1090 | </testsuite> |
|
1090 | </testsuite> | |
1091 |
|
1091 | |||
1092 | Missing skips or blacklisted skips don't count as executed: |
|
1092 | Missing skips or blacklisted skips don't count as executed: | |
1093 | $ echo test-failure.t > blacklist |
|
1093 | $ echo test-failure.t > blacklist | |
1094 | $ rt --blacklist=blacklist --json\ |
|
1094 | $ rt --blacklist=blacklist --json\ | |
1095 | > test-failure.t test-bogus.t |
|
1095 | > test-failure.t test-bogus.t | |
1096 | running 2 tests using 1 parallel processes |
|
1096 | running 2 tests using 1 parallel processes | |
1097 | ss |
|
1097 | ss | |
1098 | Skipped test-bogus.t: Doesn't exist |
|
1098 | Skipped test-bogus.t: Doesn't exist | |
1099 | Skipped test-failure.t: blacklisted |
|
1099 | Skipped test-failure.t: blacklisted | |
1100 | # Ran 0 tests, 2 skipped, 0 failed. |
|
1100 | # Ran 0 tests, 2 skipped, 0 failed. | |
1101 | $ cat report.json |
|
1101 | $ cat report.json | |
1102 | testreport ={ |
|
1102 | testreport ={ | |
1103 | "test-bogus.t": { |
|
1103 | "test-bogus.t": { | |
1104 | "result": "skip" |
|
1104 | "result": "skip" | |
1105 | }, |
|
1105 | }, | |
1106 | "test-failure.t": { |
|
1106 | "test-failure.t": { | |
1107 | "result": "skip" |
|
1107 | "result": "skip" | |
1108 | } |
|
1108 | } | |
1109 | } (no-eol) |
|
1109 | } (no-eol) | |
1110 |
|
1110 | |||
1111 | Whitelist trumps blacklist |
|
1111 | Whitelist trumps blacklist | |
1112 | $ echo test-failure.t > whitelist |
|
1112 | $ echo test-failure.t > whitelist | |
1113 | $ rt --blacklist=blacklist --whitelist=whitelist --json\ |
|
1113 | $ rt --blacklist=blacklist --whitelist=whitelist --json\ | |
1114 | > test-failure.t test-bogus.t |
|
1114 | > test-failure.t test-bogus.t | |
1115 | running 2 tests using 1 parallel processes |
|
1115 | running 2 tests using 1 parallel processes | |
1116 | s |
|
1116 | s | |
1117 | --- $TESTTMP/test-failure.t |
|
1117 | --- $TESTTMP/test-failure.t | |
1118 | +++ $TESTTMP/test-failure.t.err |
|
1118 | +++ $TESTTMP/test-failure.t.err | |
1119 | @@ -1,5 +1,5 @@ |
|
1119 | @@ -1,5 +1,5 @@ | |
1120 | $ echo babar |
|
1120 | $ echo babar | |
1121 | - rataxes |
|
1121 | - rataxes | |
1122 | + babar |
|
1122 | + babar | |
1123 | This is a noop statement so that |
|
1123 | This is a noop statement so that | |
1124 | this test is still more bytes than success. |
|
1124 | this test is still more bytes than success. | |
1125 | pad pad pad pad............................................................ |
|
1125 | pad pad pad pad............................................................ | |
1126 |
|
1126 | |||
1127 | ERROR: test-failure.t output changed |
|
1127 | ERROR: test-failure.t output changed | |
1128 | ! |
|
1128 | ! | |
1129 | Skipped test-bogus.t: Doesn't exist |
|
1129 | Skipped test-bogus.t: Doesn't exist | |
1130 | Failed test-failure.t: output changed |
|
1130 | Failed test-failure.t: output changed | |
1131 | # Ran 1 tests, 1 skipped, 1 failed. |
|
1131 | # Ran 1 tests, 1 skipped, 1 failed. | |
1132 | python hash seed: * (glob) |
|
1132 | python hash seed: * (glob) | |
1133 | [1] |
|
1133 | [1] | |
1134 |
|
1134 | |||
1135 | Ensure that --test-list causes only the tests listed in that file to |
|
1135 | Ensure that --test-list causes only the tests listed in that file to | |
1136 | be executed. |
|
1136 | be executed. | |
1137 | $ echo test-success.t >> onlytest |
|
1137 | $ echo test-success.t >> onlytest | |
1138 | $ rt --test-list=onlytest |
|
1138 | $ rt --test-list=onlytest | |
1139 | running 1 tests using 1 parallel processes |
|
1139 | running 1 tests using 1 parallel processes | |
1140 | . |
|
1140 | . | |
1141 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1141 | # Ran 1 tests, 0 skipped, 0 failed. | |
1142 | $ echo test-bogus.t >> anothertest |
|
1142 | $ echo test-bogus.t >> anothertest | |
1143 | $ rt --test-list=onlytest --test-list=anothertest |
|
1143 | $ rt --test-list=onlytest --test-list=anothertest | |
1144 | running 2 tests using 1 parallel processes |
|
1144 | running 2 tests using 1 parallel processes | |
1145 | s. |
|
1145 | s. | |
1146 | Skipped test-bogus.t: Doesn't exist |
|
1146 | Skipped test-bogus.t: Doesn't exist | |
1147 | # Ran 1 tests, 1 skipped, 0 failed. |
|
1147 | # Ran 1 tests, 1 skipped, 0 failed. | |
1148 | $ rm onlytest anothertest |
|
1148 | $ rm onlytest anothertest | |
1149 |
|
1149 | |||
1150 | test for --json |
|
1150 | test for --json | |
1151 | ================== |
|
1151 | ================== | |
1152 |
|
1152 | |||
1153 | $ rt --json |
|
1153 | $ rt --json | |
1154 | running 3 tests using 1 parallel processes |
|
1154 | running 3 tests using 1 parallel processes | |
1155 |
|
1155 | |||
1156 | --- $TESTTMP/test-failure.t |
|
1156 | --- $TESTTMP/test-failure.t | |
1157 | +++ $TESTTMP/test-failure.t.err |
|
1157 | +++ $TESTTMP/test-failure.t.err | |
1158 | @@ -1,5 +1,5 @@ |
|
1158 | @@ -1,5 +1,5 @@ | |
1159 | $ echo babar |
|
1159 | $ echo babar | |
1160 | - rataxes |
|
1160 | - rataxes | |
1161 | + babar |
|
1161 | + babar | |
1162 | This is a noop statement so that |
|
1162 | This is a noop statement so that | |
1163 | this test is still more bytes than success. |
|
1163 | this test is still more bytes than success. | |
1164 | pad pad pad pad............................................................ |
|
1164 | pad pad pad pad............................................................ | |
1165 |
|
1165 | |||
1166 | ERROR: test-failure.t output changed |
|
1166 | ERROR: test-failure.t output changed | |
1167 | !.s |
|
1167 | !.s | |
1168 | Skipped test-skip.t: missing feature: nail clipper |
|
1168 | Skipped test-skip.t: missing feature: nail clipper | |
1169 | Failed test-failure.t: output changed |
|
1169 | Failed test-failure.t: output changed | |
1170 | # Ran 2 tests, 1 skipped, 1 failed. |
|
1170 | # Ran 2 tests, 1 skipped, 1 failed. | |
1171 | python hash seed: * (glob) |
|
1171 | python hash seed: * (glob) | |
1172 | [1] |
|
1172 | [1] | |
1173 |
|
1173 | |||
1174 | $ cat report.json |
|
1174 | $ cat report.json | |
1175 | testreport ={ |
|
1175 | testreport ={ | |
1176 | "test-failure.t": [\{] (re) |
|
1176 | "test-failure.t": [\{] (re) | |
1177 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1177 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1178 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1178 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1179 | "diff": "---.+\+\+\+.+", ? (re) |
|
1179 | "diff": "---.+\+\+\+.+", ? (re) | |
1180 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1180 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1181 | "result": "failure", ? (re) |
|
1181 | "result": "failure", ? (re) | |
1182 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1182 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1183 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1183 | "time": "\s*\d+\.\d{3,4}" (re) | |
1184 | }, ? (re) |
|
1184 | }, ? (re) | |
1185 | "test-skip.t": { |
|
1185 | "test-skip.t": { | |
1186 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1186 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1187 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1187 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1188 | "diff": "", ? (re) |
|
1188 | "diff": "", ? (re) | |
1189 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1189 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1190 | "result": "skip", ? (re) |
|
1190 | "result": "skip", ? (re) | |
1191 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1191 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1192 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1192 | "time": "\s*\d+\.\d{3,4}" (re) | |
1193 | }, ? (re) |
|
1193 | }, ? (re) | |
1194 | "test-success.t": [\{] (re) |
|
1194 | "test-success.t": [\{] (re) | |
1195 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1195 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1196 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1196 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1197 | "diff": "", ? (re) |
|
1197 | "diff": "", ? (re) | |
1198 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1198 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1199 | "result": "success", ? (re) |
|
1199 | "result": "success", ? (re) | |
1200 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1200 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1201 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1201 | "time": "\s*\d+\.\d{3,4}" (re) | |
1202 | } |
|
1202 | } | |
1203 | } (no-eol) |
|
1203 | } (no-eol) | |
1204 | --json with --outputdir |
|
1204 | --json with --outputdir | |
1205 |
|
1205 | |||
1206 | $ rm report.json |
|
1206 | $ rm report.json | |
1207 | $ rm -r output |
|
1207 | $ rm -r output | |
1208 | $ mkdir output |
|
1208 | $ mkdir output | |
1209 | $ rt --json --outputdir output |
|
1209 | $ rt --json --outputdir output | |
1210 | running 3 tests using 1 parallel processes |
|
1210 | running 3 tests using 1 parallel processes | |
1211 |
|
1211 | |||
1212 | --- $TESTTMP/test-failure.t |
|
1212 | --- $TESTTMP/test-failure.t | |
1213 | +++ $TESTTMP/output/test-failure.t.err |
|
1213 | +++ $TESTTMP/output/test-failure.t.err | |
1214 | @@ -1,5 +1,5 @@ |
|
1214 | @@ -1,5 +1,5 @@ | |
1215 | $ echo babar |
|
1215 | $ echo babar | |
1216 | - rataxes |
|
1216 | - rataxes | |
1217 | + babar |
|
1217 | + babar | |
1218 | This is a noop statement so that |
|
1218 | This is a noop statement so that | |
1219 | this test is still more bytes than success. |
|
1219 | this test is still more bytes than success. | |
1220 | pad pad pad pad............................................................ |
|
1220 | pad pad pad pad............................................................ | |
1221 |
|
1221 | |||
1222 | ERROR: test-failure.t output changed |
|
1222 | ERROR: test-failure.t output changed | |
1223 | !.s |
|
1223 | !.s | |
1224 | Skipped test-skip.t: missing feature: nail clipper |
|
1224 | Skipped test-skip.t: missing feature: nail clipper | |
1225 | Failed test-failure.t: output changed |
|
1225 | Failed test-failure.t: output changed | |
1226 | # Ran 2 tests, 1 skipped, 1 failed. |
|
1226 | # Ran 2 tests, 1 skipped, 1 failed. | |
1227 | python hash seed: * (glob) |
|
1227 | python hash seed: * (glob) | |
1228 | [1] |
|
1228 | [1] | |
1229 | $ f report.json |
|
1229 | $ f report.json | |
1230 | report.json: file not found |
|
1230 | report.json: file not found | |
1231 | $ cat output/report.json |
|
1231 | $ cat output/report.json | |
1232 | testreport ={ |
|
1232 | testreport ={ | |
1233 | "test-failure.t": [\{] (re) |
|
1233 | "test-failure.t": [\{] (re) | |
1234 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1234 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1235 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1235 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1236 | "diff": "---.+\+\+\+.+", ? (re) |
|
1236 | "diff": "---.+\+\+\+.+", ? (re) | |
1237 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1237 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1238 | "result": "failure", ? (re) |
|
1238 | "result": "failure", ? (re) | |
1239 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1239 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1240 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1240 | "time": "\s*\d+\.\d{3,4}" (re) | |
1241 | }, ? (re) |
|
1241 | }, ? (re) | |
1242 | "test-skip.t": { |
|
1242 | "test-skip.t": { | |
1243 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1243 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1244 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1244 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1245 | "diff": "", ? (re) |
|
1245 | "diff": "", ? (re) | |
1246 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1246 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1247 | "result": "skip", ? (re) |
|
1247 | "result": "skip", ? (re) | |
1248 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1248 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1249 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1249 | "time": "\s*\d+\.\d{3,4}" (re) | |
1250 | }, ? (re) |
|
1250 | }, ? (re) | |
1251 | "test-success.t": [\{] (re) |
|
1251 | "test-success.t": [\{] (re) | |
1252 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1252 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1253 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1253 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1254 | "diff": "", ? (re) |
|
1254 | "diff": "", ? (re) | |
1255 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1255 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1256 | "result": "success", ? (re) |
|
1256 | "result": "success", ? (re) | |
1257 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1257 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1258 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1258 | "time": "\s*\d+\.\d{3,4}" (re) | |
1259 | } |
|
1259 | } | |
1260 | } (no-eol) |
|
1260 | } (no-eol) | |
1261 | $ ls -a output |
|
1261 | $ ls -a output | |
1262 | . |
|
1262 | . | |
1263 | .. |
|
1263 | .. | |
1264 | .testtimes |
|
1264 | .testtimes | |
1265 | report.json |
|
1265 | report.json | |
1266 | test-failure.t.err |
|
1266 | test-failure.t.err | |
1267 |
|
1267 | |||
1268 | Test that failed test accepted through interactive are properly reported: |
|
1268 | Test that failed test accepted through interactive are properly reported: | |
1269 |
|
1269 | |||
1270 | $ cp test-failure.t backup |
|
1270 | $ cp test-failure.t backup | |
1271 | $ echo y | rt --json -i |
|
1271 | $ echo y | rt --json -i | |
1272 | running 3 tests using 1 parallel processes |
|
1272 | running 3 tests using 1 parallel processes | |
1273 |
|
1273 | |||
1274 | --- $TESTTMP/test-failure.t |
|
1274 | --- $TESTTMP/test-failure.t | |
1275 | +++ $TESTTMP/test-failure.t.err |
|
1275 | +++ $TESTTMP/test-failure.t.err | |
1276 | @@ -1,5 +1,5 @@ |
|
1276 | @@ -1,5 +1,5 @@ | |
1277 | $ echo babar |
|
1277 | $ echo babar | |
1278 | - rataxes |
|
1278 | - rataxes | |
1279 | + babar |
|
1279 | + babar | |
1280 | This is a noop statement so that |
|
1280 | This is a noop statement so that | |
1281 | this test is still more bytes than success. |
|
1281 | this test is still more bytes than success. | |
1282 | pad pad pad pad............................................................ |
|
1282 | pad pad pad pad............................................................ | |
1283 | Accept this change? [n] ..s |
|
1283 | Accept this change? [n] ..s | |
1284 | Skipped test-skip.t: missing feature: nail clipper |
|
1284 | Skipped test-skip.t: missing feature: nail clipper | |
1285 | # Ran 2 tests, 1 skipped, 0 failed. |
|
1285 | # Ran 2 tests, 1 skipped, 0 failed. | |
1286 |
|
1286 | |||
1287 | $ cat report.json |
|
1287 | $ cat report.json | |
1288 | testreport ={ |
|
1288 | testreport ={ | |
1289 | "test-failure.t": [\{] (re) |
|
1289 | "test-failure.t": [\{] (re) | |
1290 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1290 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1291 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1291 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1292 | "diff": "", ? (re) |
|
1292 | "diff": "", ? (re) | |
1293 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1293 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1294 | "result": "success", ? (re) |
|
1294 | "result": "success", ? (re) | |
1295 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1295 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1296 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1296 | "time": "\s*\d+\.\d{3,4}" (re) | |
1297 | }, ? (re) |
|
1297 | }, ? (re) | |
1298 | "test-skip.t": { |
|
1298 | "test-skip.t": { | |
1299 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1299 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1300 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1300 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1301 | "diff": "", ? (re) |
|
1301 | "diff": "", ? (re) | |
1302 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1302 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1303 | "result": "skip", ? (re) |
|
1303 | "result": "skip", ? (re) | |
1304 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1304 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1305 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1305 | "time": "\s*\d+\.\d{3,4}" (re) | |
1306 | }, ? (re) |
|
1306 | }, ? (re) | |
1307 | "test-success.t": [\{] (re) |
|
1307 | "test-success.t": [\{] (re) | |
1308 | "csys": "\s*\d+\.\d{3,4}", ? (re) |
|
1308 | "csys": "\s*\d+\.\d{3,4}", ? (re) | |
1309 | "cuser": "\s*\d+\.\d{3,4}", ? (re) |
|
1309 | "cuser": "\s*\d+\.\d{3,4}", ? (re) | |
1310 | "diff": "", ? (re) |
|
1310 | "diff": "", ? (re) | |
1311 | "end": "\s*\d+\.\d{3,4}", ? (re) |
|
1311 | "end": "\s*\d+\.\d{3,4}", ? (re) | |
1312 | "result": "success", ? (re) |
|
1312 | "result": "success", ? (re) | |
1313 | "start": "\s*\d+\.\d{3,4}", ? (re) |
|
1313 | "start": "\s*\d+\.\d{3,4}", ? (re) | |
1314 | "time": "\s*\d+\.\d{3,4}" (re) |
|
1314 | "time": "\s*\d+\.\d{3,4}" (re) | |
1315 | } |
|
1315 | } | |
1316 | } (no-eol) |
|
1316 | } (no-eol) | |
1317 | $ mv backup test-failure.t |
|
1317 | $ mv backup test-failure.t | |
1318 |
|
1318 | |||
1319 | backslash on end of line with glob matching is handled properly |
|
1319 | backslash on end of line with glob matching is handled properly | |
1320 |
|
1320 | |||
1321 | $ cat > test-glob-backslash.t << EOF |
|
1321 | $ cat > test-glob-backslash.t << EOF | |
1322 | > $ echo 'foo bar \\' |
|
1322 | > $ echo 'foo bar \\' | |
1323 | > foo * \ (glob) |
|
1323 | > foo * \ (glob) | |
1324 | > EOF |
|
1324 | > EOF | |
1325 |
|
1325 | |||
1326 | $ rt test-glob-backslash.t |
|
1326 | $ rt test-glob-backslash.t | |
1327 | running 1 tests using 1 parallel processes |
|
1327 | running 1 tests using 1 parallel processes | |
1328 | . |
|
1328 | . | |
1329 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1329 | # Ran 1 tests, 0 skipped, 0 failed. | |
1330 |
|
1330 | |||
1331 | $ rm -f test-glob-backslash.t |
|
1331 | $ rm -f test-glob-backslash.t | |
1332 |
|
1332 | |||
1333 | Test globbing of local IP addresses |
|
1333 | Test globbing of local IP addresses | |
1334 | $ echo 172.16.18.1 |
|
1334 | $ echo 172.16.18.1 | |
1335 | $LOCALIP (glob) |
|
1335 | $LOCALIP (glob) | |
1336 | $ echo dead:beef::1 |
|
1336 | $ echo dead:beef::1 | |
1337 | $LOCALIP (glob) |
|
1337 | $LOCALIP (glob) | |
1338 |
|
1338 | |||
1339 | Add support for external test formatter |
|
1339 | Add support for external test formatter | |
1340 | ======================================= |
|
1340 | ======================================= | |
1341 |
|
1341 | |||
1342 | $ CUSTOM_TEST_RESULT=basic_test_result "$PYTHON" $TESTDIR/run-tests.py --with-hg=`which hg` -j1 "$@" test-success.t test-failure.t |
|
1342 | $ CUSTOM_TEST_RESULT=basic_test_result "$PYTHON" $TESTDIR/run-tests.py --with-hg=`which hg` -j1 "$@" test-success.t test-failure.t | |
1343 | running 2 tests using 1 parallel processes |
|
1343 | running 2 tests using 1 parallel processes | |
1344 |
|
1344 | |||
1345 | # Ran 2 tests, 0 skipped, 0 failed. |
|
1345 | # Ran 2 tests, 0 skipped, 0 failed. | |
1346 | ON_START! <__main__.TestSuite tests=[<__main__.TTest testMethod=test-failure.t>, <__main__.TTest testMethod=test-success.t>]> |
|
1346 | ON_START! <__main__.TestSuite tests=[<__main__.TTest testMethod=test-failure.t>, <__main__.TTest testMethod=test-success.t>]> | |
1347 | FAILURE! test-failure.t output changed |
|
1347 | FAILURE! test-failure.t output changed | |
1348 | SUCCESS! test-success.t |
|
1348 | SUCCESS! test-success.t | |
1349 | ON_END! |
|
1349 | ON_END! | |
1350 |
|
1350 | |||
1351 | Test reusability for third party tools |
|
1351 | Test reusability for third party tools | |
1352 | ====================================== |
|
1352 | ====================================== | |
1353 |
|
1353 | |||
1354 | $ mkdir "$TESTTMP"/anothertests |
|
1354 | $ mkdir "$TESTTMP"/anothertests | |
1355 | $ cd "$TESTTMP"/anothertests |
|
1355 | $ cd "$TESTTMP"/anothertests | |
1356 |
|
1356 | |||
1357 | test that `run-tests.py` can execute hghave, even if it runs not in |
|
1357 | test that `run-tests.py` can execute hghave, even if it runs not in | |
1358 | Mercurial source tree. |
|
1358 | Mercurial source tree. | |
1359 |
|
1359 | |||
1360 | $ cat > test-hghave.t <<EOF |
|
1360 | $ cat > test-hghave.t <<EOF | |
1361 | > #require true |
|
1361 | > #require true | |
1362 | > $ echo foo |
|
1362 | > $ echo foo | |
1363 | > foo |
|
1363 | > foo | |
1364 | > EOF |
|
1364 | > EOF | |
1365 | $ rt test-hghave.t |
|
1365 | $ rt test-hghave.t | |
1366 | running 1 tests using 1 parallel processes |
|
1366 | running 1 tests using 1 parallel processes | |
1367 | . |
|
1367 | . | |
1368 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1368 | # Ran 1 tests, 0 skipped, 0 failed. | |
1369 |
|
1369 | |||
1370 | test that RUNTESTDIR refers the directory, in which `run-tests.py` now |
|
1370 | test that RUNTESTDIR refers the directory, in which `run-tests.py` now | |
1371 | running is placed. |
|
1371 | running is placed. | |
1372 |
|
1372 | |||
1373 | $ cat > test-runtestdir.t <<EOF |
|
1373 | $ cat > test-runtestdir.t <<EOF | |
1374 | > - $TESTDIR, in which test-run-tests.t is placed |
|
1374 | > - $TESTDIR, in which test-run-tests.t is placed | |
1375 | > - \$TESTDIR, in which test-runtestdir.t is placed (expanded at runtime) |
|
1375 | > - \$TESTDIR, in which test-runtestdir.t is placed (expanded at runtime) | |
1376 | > - \$RUNTESTDIR, in which run-tests.py is placed (expanded at runtime) |
|
1376 | > - \$RUNTESTDIR, in which run-tests.py is placed (expanded at runtime) | |
1377 | > |
|
1377 | > | |
1378 | > #if windows |
|
1378 | > #if windows | |
1379 | > $ test "\$TESTDIR" = "$TESTTMP\anothertests" |
|
1379 | > $ test "\$TESTDIR" = "$TESTTMP\anothertests" | |
1380 | > #else |
|
1380 | > #else | |
1381 | > $ test "\$TESTDIR" = "$TESTTMP"/anothertests |
|
1381 | > $ test "\$TESTDIR" = "$TESTTMP"/anothertests | |
1382 | > #endif |
|
1382 | > #endif | |
1383 | > If this prints a path, that means RUNTESTDIR didn't equal |
|
1383 | > If this prints a path, that means RUNTESTDIR didn't equal | |
1384 | > TESTDIR as it should have. |
|
1384 | > TESTDIR as it should have. | |
1385 | > $ test "\$RUNTESTDIR" = "$TESTDIR" || echo "\$RUNTESTDIR" |
|
1385 | > $ test "\$RUNTESTDIR" = "$TESTDIR" || echo "\$RUNTESTDIR" | |
1386 | > This should print the start of check-code. If this passes but the |
|
1386 | > This should print the start of check-code. If this passes but the | |
1387 | > previous check failed, that means we found a copy of check-code at whatever |
|
1387 | > previous check failed, that means we found a copy of check-code at whatever | |
1388 | > RUNTESTSDIR ended up containing, even though it doesn't match TESTDIR. |
|
1388 | > RUNTESTSDIR ended up containing, even though it doesn't match TESTDIR. | |
1389 | > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@' |
|
1389 | > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py | sed 's@.!.*python@#!USRBINENVPY@' | |
1390 | > #!USRBINENVPY |
|
1390 | > #!USRBINENVPY | |
1391 | > # |
|
1391 | > # | |
1392 | > # check-code - a style and portability checker for Mercurial |
|
1392 | > # check-code - a style and portability checker for Mercurial | |
1393 | > EOF |
|
1393 | > EOF | |
1394 | $ rt test-runtestdir.t |
|
1394 | $ rt test-runtestdir.t | |
1395 | running 1 tests using 1 parallel processes |
|
1395 | running 1 tests using 1 parallel processes | |
1396 | . |
|
1396 | . | |
1397 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1397 | # Ran 1 tests, 0 skipped, 0 failed. | |
1398 |
|
1398 | |||
1399 | #if execbit |
|
1399 | #if execbit | |
1400 |
|
1400 | |||
1401 | test that TESTDIR is referred in PATH |
|
1401 | test that TESTDIR is referred in PATH | |
1402 |
|
1402 | |||
1403 | $ cat > custom-command.sh <<EOF |
|
1403 | $ cat > custom-command.sh <<EOF | |
1404 | > #!/bin/sh |
|
1404 | > #!/bin/sh | |
1405 | > echo "hello world" |
|
1405 | > echo "hello world" | |
1406 | > EOF |
|
1406 | > EOF | |
1407 | $ chmod +x custom-command.sh |
|
1407 | $ chmod +x custom-command.sh | |
1408 | $ cat > test-testdir-path.t <<EOF |
|
1408 | $ cat > test-testdir-path.t <<EOF | |
1409 | > $ custom-command.sh |
|
1409 | > $ custom-command.sh | |
1410 | > hello world |
|
1410 | > hello world | |
1411 | > EOF |
|
1411 | > EOF | |
1412 | $ rt test-testdir-path.t |
|
1412 | $ rt test-testdir-path.t | |
1413 | running 1 tests using 1 parallel processes |
|
1413 | running 1 tests using 1 parallel processes | |
1414 | . |
|
1414 | . | |
1415 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1415 | # Ran 1 tests, 0 skipped, 0 failed. | |
1416 |
|
1416 | |||
1417 | #endif |
|
1417 | #endif | |
1418 |
|
1418 | |||
1419 | test support for --allow-slow-tests |
|
1419 | test support for --allow-slow-tests | |
1420 | $ cat > test-very-slow-test.t <<EOF |
|
1420 | $ cat > test-very-slow-test.t <<EOF | |
1421 | > #require slow |
|
1421 | > #require slow | |
1422 | > $ echo pass |
|
1422 | > $ echo pass | |
1423 | > pass |
|
1423 | > pass | |
1424 | > EOF |
|
1424 | > EOF | |
1425 | $ rt test-very-slow-test.t |
|
1425 | $ rt test-very-slow-test.t | |
1426 | running 1 tests using 1 parallel processes |
|
1426 | running 1 tests using 1 parallel processes | |
1427 | s |
|
1427 | s | |
1428 | Skipped test-very-slow-test.t: missing feature: allow slow tests (use --allow-slow-tests) |
|
1428 | Skipped test-very-slow-test.t: missing feature: allow slow tests (use --allow-slow-tests) | |
1429 | # Ran 0 tests, 1 skipped, 0 failed. |
|
1429 | # Ran 0 tests, 1 skipped, 0 failed. | |
1430 | $ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t |
|
1430 | $ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t | |
1431 | running 1 tests using 1 parallel processes |
|
1431 | running 1 tests using 1 parallel processes | |
1432 | . |
|
1432 | . | |
1433 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1433 | # Ran 1 tests, 0 skipped, 0 failed. | |
1434 |
|
1434 | |||
1435 | support for running a test outside the current directory |
|
1435 | support for running a test outside the current directory | |
1436 | $ mkdir nonlocal |
|
1436 | $ mkdir nonlocal | |
1437 | $ cat > nonlocal/test-is-not-here.t << EOF |
|
1437 | $ cat > nonlocal/test-is-not-here.t << EOF | |
1438 | > $ echo pass |
|
1438 | > $ echo pass | |
1439 | > pass |
|
1439 | > pass | |
1440 | > EOF |
|
1440 | > EOF | |
1441 | $ rt nonlocal/test-is-not-here.t |
|
1441 | $ rt nonlocal/test-is-not-here.t | |
1442 | running 1 tests using 1 parallel processes |
|
1442 | running 1 tests using 1 parallel processes | |
1443 | . |
|
1443 | . | |
1444 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1444 | # Ran 1 tests, 0 skipped, 0 failed. | |
1445 |
|
1445 | |||
1446 | support for automatically discovering test if arg is a folder |
|
1446 | support for automatically discovering test if arg is a folder | |
1447 | $ mkdir tmp && cd tmp |
|
1447 | $ mkdir tmp && cd tmp | |
1448 |
|
1448 | |||
1449 | $ cat > test-uno.t << EOF |
|
1449 | $ cat > test-uno.t << EOF | |
1450 | > $ echo line |
|
1450 | > $ echo line | |
1451 | > line |
|
1451 | > line | |
1452 | > EOF |
|
1452 | > EOF | |
1453 |
|
1453 | |||
1454 | $ cp test-uno.t test-dos.t |
|
1454 | $ cp test-uno.t test-dos.t | |
1455 | $ cd .. |
|
1455 | $ cd .. | |
1456 | $ cp -R tmp tmpp |
|
1456 | $ cp -R tmp tmpp | |
1457 | $ cp tmp/test-uno.t test-solo.t |
|
1457 | $ cp tmp/test-uno.t test-solo.t | |
1458 |
|
1458 | |||
1459 | $ rt tmp/ test-solo.t tmpp |
|
1459 | $ rt tmp/ test-solo.t tmpp | |
1460 | running 5 tests using 1 parallel processes |
|
1460 | running 5 tests using 1 parallel processes | |
1461 | ..... |
|
1461 | ..... | |
1462 | # Ran 5 tests, 0 skipped, 0 failed. |
|
1462 | # Ran 5 tests, 0 skipped, 0 failed. | |
1463 | $ rm -rf tmp tmpp |
|
1463 | $ rm -rf tmp tmpp | |
1464 |
|
1464 | |||
1465 | support for running run-tests.py from another directory |
|
1465 | support for running run-tests.py from another directory | |
1466 | $ mkdir tmp && cd tmp |
|
1466 | $ mkdir tmp && cd tmp | |
1467 |
|
1467 | |||
1468 | $ cat > useful-file.sh << EOF |
|
1468 | $ cat > useful-file.sh << EOF | |
1469 | > important command |
|
1469 | > important command | |
1470 | > EOF |
|
1470 | > EOF | |
1471 |
|
1471 | |||
1472 | $ cat > test-folder.t << EOF |
|
1472 | $ cat > test-folder.t << EOF | |
1473 | > $ cat \$TESTDIR/useful-file.sh |
|
1473 | > $ cat \$TESTDIR/useful-file.sh | |
1474 | > important command |
|
1474 | > important command | |
1475 | > EOF |
|
1475 | > EOF | |
1476 |
|
1476 | |||
1477 | $ cat > test-folder-fail.t << EOF |
|
1477 | $ cat > test-folder-fail.t << EOF | |
1478 | > $ cat \$TESTDIR/useful-file.sh |
|
1478 | > $ cat \$TESTDIR/useful-file.sh | |
1479 | > important commando |
|
1479 | > important commando | |
1480 | > EOF |
|
1480 | > EOF | |
1481 |
|
1481 | |||
1482 | $ cd .. |
|
1482 | $ cd .. | |
1483 | $ rt tmp/test-*.t |
|
1483 | $ rt tmp/test-*.t | |
1484 | running 2 tests using 1 parallel processes |
|
1484 | running 2 tests using 1 parallel processes | |
1485 |
|
1485 | |||
1486 | --- $TESTTMP/anothertests/tmp/test-folder-fail.t |
|
1486 | --- $TESTTMP/anothertests/tmp/test-folder-fail.t | |
1487 | +++ $TESTTMP/anothertests/tmp/test-folder-fail.t.err |
|
1487 | +++ $TESTTMP/anothertests/tmp/test-folder-fail.t.err | |
1488 | @@ -1,2 +1,2 @@ |
|
1488 | @@ -1,2 +1,2 @@ | |
1489 | $ cat $TESTDIR/useful-file.sh |
|
1489 | $ cat $TESTDIR/useful-file.sh | |
1490 | - important commando |
|
1490 | - important commando | |
1491 | + important command |
|
1491 | + important command | |
1492 |
|
1492 | |||
1493 | ERROR: test-folder-fail.t output changed |
|
1493 | ERROR: test-folder-fail.t output changed | |
1494 | !. |
|
1494 | !. | |
1495 | Failed test-folder-fail.t: output changed |
|
1495 | Failed test-folder-fail.t: output changed | |
1496 | # Ran 2 tests, 0 skipped, 1 failed. |
|
1496 | # Ran 2 tests, 0 skipped, 1 failed. | |
1497 | python hash seed: * (glob) |
|
1497 | python hash seed: * (glob) | |
1498 | [1] |
|
1498 | [1] | |
1499 |
|
1499 | |||
1500 | support for bisecting failed tests automatically |
|
1500 | support for bisecting failed tests automatically | |
1501 | $ hg init bisect |
|
1501 | $ hg init bisect | |
1502 | $ cd bisect |
|
1502 | $ cd bisect | |
1503 | $ cat >> test-bisect.t <<EOF |
|
1503 | $ cat >> test-bisect.t <<EOF | |
1504 | > $ echo pass |
|
1504 | > $ echo pass | |
1505 | > pass |
|
1505 | > pass | |
1506 | > EOF |
|
1506 | > EOF | |
1507 | $ hg add test-bisect.t |
|
1507 | $ hg add test-bisect.t | |
1508 | $ hg ci -m 'good' |
|
1508 | $ hg ci -m 'good' | |
1509 | $ cat >> test-bisect.t <<EOF |
|
1509 | $ cat >> test-bisect.t <<EOF | |
1510 | > $ echo pass |
|
1510 | > $ echo pass | |
1511 | > fail |
|
1511 | > fail | |
1512 | > EOF |
|
1512 | > EOF | |
1513 | $ hg ci -m 'bad' |
|
1513 | $ hg ci -m 'bad' | |
1514 | $ rt --known-good-rev=0 test-bisect.t |
|
1514 | $ rt --known-good-rev=0 test-bisect.t | |
1515 | running 1 tests using 1 parallel processes |
|
1515 | running 1 tests using 1 parallel processes | |
1516 |
|
1516 | |||
1517 | --- $TESTTMP/anothertests/bisect/test-bisect.t |
|
1517 | --- $TESTTMP/anothertests/bisect/test-bisect.t | |
1518 | +++ $TESTTMP/anothertests/bisect/test-bisect.t.err |
|
1518 | +++ $TESTTMP/anothertests/bisect/test-bisect.t.err | |
1519 | @@ -1,4 +1,4 @@ |
|
1519 | @@ -1,4 +1,4 @@ | |
1520 | $ echo pass |
|
1520 | $ echo pass | |
1521 | pass |
|
1521 | pass | |
1522 | $ echo pass |
|
1522 | $ echo pass | |
1523 | - fail |
|
1523 | - fail | |
1524 | + pass |
|
1524 | + pass | |
1525 |
|
1525 | |||
1526 | ERROR: test-bisect.t output changed |
|
1526 | ERROR: test-bisect.t output changed | |
1527 | ! |
|
1527 | ! | |
1528 | Failed test-bisect.t: output changed |
|
1528 | Failed test-bisect.t: output changed | |
1529 | test-bisect.t broken by 72cbf122d116 (bad) |
|
1529 | test-bisect.t broken by 72cbf122d116 (bad) | |
1530 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1530 | # Ran 1 tests, 0 skipped, 1 failed. | |
1531 | python hash seed: * (glob) |
|
1531 | python hash seed: * (glob) | |
1532 | [1] |
|
1532 | [1] | |
1533 |
|
1533 | |||
1534 | $ cd .. |
|
1534 | $ cd .. | |
1535 |
|
1535 | |||
1536 | support bisecting a separate repo |
|
1536 | support bisecting a separate repo | |
1537 |
|
1537 | |||
1538 | $ hg init bisect-dependent |
|
1538 | $ hg init bisect-dependent | |
1539 | $ cd bisect-dependent |
|
1539 | $ cd bisect-dependent | |
1540 | $ cat > test-bisect-dependent.t <<EOF |
|
1540 | $ cat > test-bisect-dependent.t <<EOF | |
1541 | > $ tail -1 \$TESTDIR/../bisect/test-bisect.t |
|
1541 | > $ tail -1 \$TESTDIR/../bisect/test-bisect.t | |
1542 | > pass |
|
1542 | > pass | |
1543 | > EOF |
|
1543 | > EOF | |
1544 | $ hg commit -Am dependent test-bisect-dependent.t |
|
1544 | $ hg commit -Am dependent test-bisect-dependent.t | |
1545 |
|
1545 | |||
1546 | $ rt --known-good-rev=0 test-bisect-dependent.t |
|
1546 | $ rt --known-good-rev=0 test-bisect-dependent.t | |
1547 | running 1 tests using 1 parallel processes |
|
1547 | running 1 tests using 1 parallel processes | |
1548 |
|
1548 | |||
1549 | --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t |
|
1549 | --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t | |
1550 | +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err |
|
1550 | +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err | |
1551 | @@ -1,2 +1,2 @@ |
|
1551 | @@ -1,2 +1,2 @@ | |
1552 | $ tail -1 $TESTDIR/../bisect/test-bisect.t |
|
1552 | $ tail -1 $TESTDIR/../bisect/test-bisect.t | |
1553 | - pass |
|
1553 | - pass | |
1554 | + fail |
|
1554 | + fail | |
1555 |
|
1555 | |||
1556 | ERROR: test-bisect-dependent.t output changed |
|
1556 | ERROR: test-bisect-dependent.t output changed | |
1557 | ! |
|
1557 | ! | |
1558 | Failed test-bisect-dependent.t: output changed |
|
1558 | Failed test-bisect-dependent.t: output changed | |
1559 | Failed to identify failure point for test-bisect-dependent.t |
|
1559 | Failed to identify failure point for test-bisect-dependent.t | |
1560 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1560 | # Ran 1 tests, 0 skipped, 1 failed. | |
1561 | python hash seed: * (glob) |
|
1561 | python hash seed: * (glob) | |
1562 | [1] |
|
1562 | [1] | |
1563 |
|
1563 | |||
1564 | $ rt --bisect-repo=../test-bisect test-bisect-dependent.t |
|
1564 | $ rt --bisect-repo=../test-bisect test-bisect-dependent.t | |
1565 | usage: run-tests.py [options] [tests] |
|
1565 | usage: run-tests.py [options] [tests] | |
1566 | run-tests.py: error: --bisect-repo cannot be used without --known-good-rev |
|
1566 | run-tests.py: error: --bisect-repo cannot be used without --known-good-rev | |
1567 | [2] |
|
1567 | [2] | |
1568 |
|
1568 | |||
1569 | $ rt --known-good-rev=0 --bisect-repo=../bisect test-bisect-dependent.t |
|
1569 | $ rt --known-good-rev=0 --bisect-repo=../bisect test-bisect-dependent.t | |
1570 | running 1 tests using 1 parallel processes |
|
1570 | running 1 tests using 1 parallel processes | |
1571 |
|
1571 | |||
1572 | --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t |
|
1572 | --- $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t | |
1573 | +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err |
|
1573 | +++ $TESTTMP/anothertests/bisect-dependent/test-bisect-dependent.t.err | |
1574 | @@ -1,2 +1,2 @@ |
|
1574 | @@ -1,2 +1,2 @@ | |
1575 | $ tail -1 $TESTDIR/../bisect/test-bisect.t |
|
1575 | $ tail -1 $TESTDIR/../bisect/test-bisect.t | |
1576 | - pass |
|
1576 | - pass | |
1577 | + fail |
|
1577 | + fail | |
1578 |
|
1578 | |||
1579 | ERROR: test-bisect-dependent.t output changed |
|
1579 | ERROR: test-bisect-dependent.t output changed | |
1580 | ! |
|
1580 | ! | |
1581 | Failed test-bisect-dependent.t: output changed |
|
1581 | Failed test-bisect-dependent.t: output changed | |
1582 | test-bisect-dependent.t broken by 72cbf122d116 (bad) |
|
1582 | test-bisect-dependent.t broken by 72cbf122d116 (bad) | |
1583 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1583 | # Ran 1 tests, 0 skipped, 1 failed. | |
1584 | python hash seed: * (glob) |
|
1584 | python hash seed: * (glob) | |
1585 | [1] |
|
1585 | [1] | |
1586 |
|
1586 | |||
1587 | $ cd .. |
|
1587 | $ cd .. | |
1588 |
|
1588 | |||
1589 | Test a broken #if statement doesn't break run-tests threading. |
|
1589 | Test a broken #if statement doesn't break run-tests threading. | |
1590 | ============================================================== |
|
1590 | ============================================================== | |
1591 | $ mkdir broken |
|
1591 | $ mkdir broken | |
1592 | $ cd broken |
|
1592 | $ cd broken | |
1593 | $ cat > test-broken.t <<EOF |
|
1593 | $ cat > test-broken.t <<EOF | |
1594 | > true |
|
1594 | > true | |
1595 | > #if notarealhghavefeature |
|
1595 | > #if notarealhghavefeature | |
1596 | > $ false |
|
1596 | > $ false | |
1597 | > #endif |
|
1597 | > #endif | |
1598 | > EOF |
|
1598 | > EOF | |
1599 | $ for f in 1 2 3 4 ; do |
|
1599 | $ for f in 1 2 3 4 ; do | |
1600 | > cat > test-works-$f.t <<EOF |
|
1600 | > cat > test-works-$f.t <<EOF | |
1601 | > This is test case $f |
|
1601 | > This is test case $f | |
1602 | > $ sleep 1 |
|
1602 | > $ sleep 1 | |
1603 | > EOF |
|
1603 | > EOF | |
1604 | > done |
|
1604 | > done | |
1605 | $ rt -j 2 |
|
1605 | $ rt -j 2 | |
1606 | running 5 tests using 2 parallel processes |
|
1606 | running 5 tests using 2 parallel processes | |
1607 | .... |
|
1607 | .... | |
1608 | # Ran 5 tests, 0 skipped, 0 failed. |
|
1608 | # Ran 5 tests, 0 skipped, 0 failed. | |
1609 | skipped: unknown feature: notarealhghavefeature |
|
1609 | skipped: unknown feature: notarealhghavefeature | |
1610 |
|
1610 | |||
1611 | $ cd .. |
|
1611 | $ cd .. | |
1612 | $ rm -rf broken |
|
1612 | $ rm -rf broken | |
1613 |
|
1613 | |||
1614 | Test cases in .t files |
|
1614 | Test cases in .t files | |
1615 | ====================== |
|
1615 | ====================== | |
1616 | $ mkdir cases |
|
1616 | $ mkdir cases | |
1617 | $ cd cases |
|
1617 | $ cd cases | |
1618 | $ cat > test-cases-abc.t <<'EOF' |
|
1618 | $ cat > test-cases-abc.t <<'EOF' | |
1619 | > #testcases A B C |
|
1619 | > #testcases A B C | |
1620 | > $ V=B |
|
1620 | > $ V=B | |
1621 | > #if A |
|
1621 | > #if A | |
1622 | > $ V=A |
|
1622 | > $ V=A | |
1623 | > #endif |
|
1623 | > #endif | |
1624 | > #if C |
|
1624 | > #if C | |
1625 | > $ V=C |
|
1625 | > $ V=C | |
1626 | > #endif |
|
1626 | > #endif | |
1627 | > $ echo $V | sed 's/A/C/' |
|
1627 | > $ echo $V | sed 's/A/C/' | |
1628 | > C |
|
1628 | > C | |
1629 | > #if C |
|
1629 | > #if C | |
1630 | > $ [ $V = C ] |
|
1630 | > $ [ $V = C ] | |
1631 | > #endif |
|
1631 | > #endif | |
1632 | > #if A |
|
1632 | > #if A | |
1633 | > $ [ $V = C ] |
|
1633 | > $ [ $V = C ] | |
1634 | > [1] |
|
1634 | > [1] | |
1635 | > #endif |
|
1635 | > #endif | |
1636 | > #if no-C |
|
1636 | > #if no-C | |
1637 | > $ [ $V = C ] |
|
1637 | > $ [ $V = C ] | |
1638 | > [1] |
|
1638 | > [1] | |
1639 | > #endif |
|
1639 | > #endif | |
1640 | > $ [ $V = D ] |
|
1640 | > $ [ $V = D ] | |
1641 | > [1] |
|
1641 | > [1] | |
1642 | > EOF |
|
1642 | > EOF | |
1643 | $ rt |
|
1643 | $ rt | |
1644 | running 3 tests using 1 parallel processes |
|
1644 | running 3 tests using 1 parallel processes | |
1645 | . |
|
1645 | . | |
1646 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1646 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1647 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err |
|
1647 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err | |
1648 | @@ -7,7 +7,7 @@ |
|
1648 | @@ -7,7 +7,7 @@ | |
1649 | $ V=C |
|
1649 | $ V=C | |
1650 | #endif |
|
1650 | #endif | |
1651 | $ echo $V | sed 's/A/C/' |
|
1651 | $ echo $V | sed 's/A/C/' | |
1652 | - C |
|
1652 | - C | |
1653 | + B |
|
1653 | + B | |
1654 | #if C |
|
1654 | #if C | |
1655 | $ [ $V = C ] |
|
1655 | $ [ $V = C ] | |
1656 | #endif |
|
1656 | #endif | |
1657 |
|
1657 | |||
1658 | ERROR: test-cases-abc.t#B output changed |
|
1658 | ERROR: test-cases-abc.t#B output changed | |
1659 | !. |
|
1659 | !. | |
1660 | Failed test-cases-abc.t#B: output changed |
|
1660 | Failed test-cases-abc.t#B: output changed | |
1661 | # Ran 3 tests, 0 skipped, 1 failed. |
|
1661 | # Ran 3 tests, 0 skipped, 1 failed. | |
1662 | python hash seed: * (glob) |
|
1662 | python hash seed: * (glob) | |
1663 | [1] |
|
1663 | [1] | |
1664 |
|
1664 | |||
1665 | --restart works |
|
1665 | --restart works | |
1666 |
|
1666 | |||
1667 | $ rt --restart |
|
1667 | $ rt --restart | |
1668 | running 2 tests using 1 parallel processes |
|
1668 | running 2 tests using 1 parallel processes | |
1669 |
|
1669 | |||
1670 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1670 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1671 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err |
|
1671 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err | |
1672 | @@ -7,7 +7,7 @@ |
|
1672 | @@ -7,7 +7,7 @@ | |
1673 | $ V=C |
|
1673 | $ V=C | |
1674 | #endif |
|
1674 | #endif | |
1675 | $ echo $V | sed 's/A/C/' |
|
1675 | $ echo $V | sed 's/A/C/' | |
1676 | - C |
|
1676 | - C | |
1677 | + B |
|
1677 | + B | |
1678 | #if C |
|
1678 | #if C | |
1679 | $ [ $V = C ] |
|
1679 | $ [ $V = C ] | |
1680 | #endif |
|
1680 | #endif | |
1681 |
|
1681 | |||
1682 | ERROR: test-cases-abc.t#B output changed |
|
1682 | ERROR: test-cases-abc.t#B output changed | |
1683 | !. |
|
1683 | !. | |
1684 | Failed test-cases-abc.t#B: output changed |
|
1684 | Failed test-cases-abc.t#B: output changed | |
1685 | # Ran 2 tests, 0 skipped, 1 failed. |
|
1685 | # Ran 2 tests, 0 skipped, 1 failed. | |
1686 | python hash seed: * (glob) |
|
1686 | python hash seed: * (glob) | |
1687 | [1] |
|
1687 | [1] | |
1688 |
|
1688 | |||
1689 | --restart works with outputdir |
|
1689 | --restart works with outputdir | |
1690 |
|
1690 | |||
1691 | $ mkdir output |
|
1691 | $ mkdir output | |
1692 | $ mv test-cases-abc.t#B.err output |
|
1692 | $ mv test-cases-abc.t#B.err output | |
1693 | $ rt --restart --outputdir output |
|
1693 | $ rt --restart --outputdir output | |
1694 | running 2 tests using 1 parallel processes |
|
1694 | running 2 tests using 1 parallel processes | |
1695 |
|
1695 | |||
1696 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1696 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1697 | +++ $TESTTMP/anothertests/cases/output/test-cases-abc.t#B.err |
|
1697 | +++ $TESTTMP/anothertests/cases/output/test-cases-abc.t#B.err | |
1698 | @@ -7,7 +7,7 @@ |
|
1698 | @@ -7,7 +7,7 @@ | |
1699 | $ V=C |
|
1699 | $ V=C | |
1700 | #endif |
|
1700 | #endif | |
1701 | $ echo $V | sed 's/A/C/' |
|
1701 | $ echo $V | sed 's/A/C/' | |
1702 | - C |
|
1702 | - C | |
1703 | + B |
|
1703 | + B | |
1704 | #if C |
|
1704 | #if C | |
1705 | $ [ $V = C ] |
|
1705 | $ [ $V = C ] | |
1706 | #endif |
|
1706 | #endif | |
1707 |
|
1707 | |||
1708 | ERROR: test-cases-abc.t#B output changed |
|
1708 | ERROR: test-cases-abc.t#B output changed | |
1709 | !. |
|
1709 | !. | |
1710 | Failed test-cases-abc.t#B: output changed |
|
1710 | Failed test-cases-abc.t#B: output changed | |
1711 | # Ran 2 tests, 0 skipped, 1 failed. |
|
1711 | # Ran 2 tests, 0 skipped, 1 failed. | |
1712 | python hash seed: * (glob) |
|
1712 | python hash seed: * (glob) | |
1713 | [1] |
|
1713 | [1] | |
1714 |
|
1714 | |||
1715 | Test TESTCASE variable |
|
1715 | Test TESTCASE variable | |
1716 |
|
1716 | |||
1717 | $ cat > test-cases-ab.t <<'EOF' |
|
1717 | $ cat > test-cases-ab.t <<'EOF' | |
1718 | > $ dostuff() { |
|
1718 | > $ dostuff() { | |
1719 | > > echo "In case $TESTCASE" |
|
1719 | > > echo "In case $TESTCASE" | |
1720 | > > } |
|
1720 | > > } | |
1721 | > #testcases A B |
|
1721 | > #testcases A B | |
1722 | > #if A |
|
1722 | > #if A | |
1723 | > $ dostuff |
|
1723 | > $ dostuff | |
1724 | > In case A |
|
1724 | > In case A | |
1725 | > #endif |
|
1725 | > #endif | |
1726 | > #if B |
|
1726 | > #if B | |
1727 | > $ dostuff |
|
1727 | > $ dostuff | |
1728 | > In case B |
|
1728 | > In case B | |
1729 | > #endif |
|
1729 | > #endif | |
1730 | > EOF |
|
1730 | > EOF | |
1731 | $ rt test-cases-ab.t |
|
1731 | $ rt test-cases-ab.t | |
1732 | running 2 tests using 1 parallel processes |
|
1732 | running 2 tests using 1 parallel processes | |
1733 | .. |
|
1733 | .. | |
1734 | # Ran 2 tests, 0 skipped, 0 failed. |
|
1734 | # Ran 2 tests, 0 skipped, 0 failed. | |
1735 |
|
1735 | |||
1736 | Support running a specific test case |
|
1736 | Support running a specific test case | |
1737 |
|
1737 | |||
1738 | $ rt "test-cases-abc.t#B" |
|
1738 | $ rt "test-cases-abc.t#B" | |
1739 | running 1 tests using 1 parallel processes |
|
1739 | running 1 tests using 1 parallel processes | |
1740 |
|
1740 | |||
1741 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1741 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1742 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err |
|
1742 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err | |
1743 | @@ -7,7 +7,7 @@ |
|
1743 | @@ -7,7 +7,7 @@ | |
1744 | $ V=C |
|
1744 | $ V=C | |
1745 | #endif |
|
1745 | #endif | |
1746 | $ echo $V | sed 's/A/C/' |
|
1746 | $ echo $V | sed 's/A/C/' | |
1747 | - C |
|
1747 | - C | |
1748 | + B |
|
1748 | + B | |
1749 | #if C |
|
1749 | #if C | |
1750 | $ [ $V = C ] |
|
1750 | $ [ $V = C ] | |
1751 | #endif |
|
1751 | #endif | |
1752 |
|
1752 | |||
1753 | ERROR: test-cases-abc.t#B output changed |
|
1753 | ERROR: test-cases-abc.t#B output changed | |
1754 | ! |
|
1754 | ! | |
1755 | Failed test-cases-abc.t#B: output changed |
|
1755 | Failed test-cases-abc.t#B: output changed | |
1756 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1756 | # Ran 1 tests, 0 skipped, 1 failed. | |
1757 | python hash seed: * (glob) |
|
1757 | python hash seed: * (glob) | |
1758 | [1] |
|
1758 | [1] | |
1759 |
|
1759 | |||
1760 | Support running multiple test cases in the same file |
|
1760 | Support running multiple test cases in the same file | |
1761 |
|
1761 | |||
1762 | $ rt test-cases-abc.t#B test-cases-abc.t#C |
|
1762 | $ rt test-cases-abc.t#B test-cases-abc.t#C | |
1763 | running 2 tests using 1 parallel processes |
|
1763 | running 2 tests using 1 parallel processes | |
1764 |
|
1764 | |||
1765 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1765 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1766 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err |
|
1766 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err | |
1767 | @@ -7,7 +7,7 @@ |
|
1767 | @@ -7,7 +7,7 @@ | |
1768 | $ V=C |
|
1768 | $ V=C | |
1769 | #endif |
|
1769 | #endif | |
1770 | $ echo $V | sed 's/A/C/' |
|
1770 | $ echo $V | sed 's/A/C/' | |
1771 | - C |
|
1771 | - C | |
1772 | + B |
|
1772 | + B | |
1773 | #if C |
|
1773 | #if C | |
1774 | $ [ $V = C ] |
|
1774 | $ [ $V = C ] | |
1775 | #endif |
|
1775 | #endif | |
1776 |
|
1776 | |||
1777 | ERROR: test-cases-abc.t#B output changed |
|
1777 | ERROR: test-cases-abc.t#B output changed | |
1778 | !. |
|
1778 | !. | |
1779 | Failed test-cases-abc.t#B: output changed |
|
1779 | Failed test-cases-abc.t#B: output changed | |
1780 | # Ran 2 tests, 0 skipped, 1 failed. |
|
1780 | # Ran 2 tests, 0 skipped, 1 failed. | |
1781 | python hash seed: * (glob) |
|
1781 | python hash seed: * (glob) | |
1782 | [1] |
|
1782 | [1] | |
1783 |
|
1783 | |||
1784 | Support ignoring invalid test cases |
|
1784 | Support ignoring invalid test cases | |
1785 |
|
1785 | |||
1786 | $ rt test-cases-abc.t#B test-cases-abc.t#D |
|
1786 | $ rt test-cases-abc.t#B test-cases-abc.t#D | |
1787 | running 1 tests using 1 parallel processes |
|
1787 | running 1 tests using 1 parallel processes | |
1788 |
|
1788 | |||
1789 | --- $TESTTMP/anothertests/cases/test-cases-abc.t |
|
1789 | --- $TESTTMP/anothertests/cases/test-cases-abc.t | |
1790 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err |
|
1790 | +++ $TESTTMP/anothertests/cases/test-cases-abc.t#B.err | |
1791 | @@ -7,7 +7,7 @@ |
|
1791 | @@ -7,7 +7,7 @@ | |
1792 | $ V=C |
|
1792 | $ V=C | |
1793 | #endif |
|
1793 | #endif | |
1794 | $ echo $V | sed 's/A/C/' |
|
1794 | $ echo $V | sed 's/A/C/' | |
1795 | - C |
|
1795 | - C | |
1796 | + B |
|
1796 | + B | |
1797 | #if C |
|
1797 | #if C | |
1798 | $ [ $V = C ] |
|
1798 | $ [ $V = C ] | |
1799 | #endif |
|
1799 | #endif | |
1800 |
|
1800 | |||
1801 | ERROR: test-cases-abc.t#B output changed |
|
1801 | ERROR: test-cases-abc.t#B output changed | |
1802 | ! |
|
1802 | ! | |
1803 | Failed test-cases-abc.t#B: output changed |
|
1803 | Failed test-cases-abc.t#B: output changed | |
1804 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1804 | # Ran 1 tests, 0 skipped, 1 failed. | |
1805 | python hash seed: * (glob) |
|
1805 | python hash seed: * (glob) | |
1806 | [1] |
|
1806 | [1] | |
1807 |
|
1807 | |||
1808 | Support running complex test cases names |
|
1808 | Support running complex test cases names | |
1809 |
|
1809 | |||
1810 | $ cat > test-cases-advanced-cases.t <<'EOF' |
|
1810 | $ cat > test-cases-advanced-cases.t <<'EOF' | |
1811 | > #testcases simple case-with-dashes casewith_-.chars |
|
1811 | > #testcases simple case-with-dashes casewith_-.chars | |
1812 | > $ echo $TESTCASE |
|
1812 | > $ echo $TESTCASE | |
1813 | > simple |
|
1813 | > simple | |
1814 | > EOF |
|
1814 | > EOF | |
1815 |
|
1815 | |||
1816 | $ cat test-cases-advanced-cases.t |
|
1816 | $ cat test-cases-advanced-cases.t | |
1817 | #testcases simple case-with-dashes casewith_-.chars |
|
1817 | #testcases simple case-with-dashes casewith_-.chars | |
1818 | $ echo $TESTCASE |
|
1818 | $ echo $TESTCASE | |
1819 | simple |
|
1819 | simple | |
1820 |
|
1820 | |||
1821 | $ rt test-cases-advanced-cases.t |
|
1821 | $ rt test-cases-advanced-cases.t | |
1822 | running 3 tests using 1 parallel processes |
|
1822 | running 3 tests using 1 parallel processes | |
1823 |
|
1823 | |||
1824 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t |
|
1824 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t | |
1825 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#case-with-dashes.err |
|
1825 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#case-with-dashes.err | |
1826 | @@ -1,3 +1,3 @@ |
|
1826 | @@ -1,3 +1,3 @@ | |
1827 | #testcases simple case-with-dashes casewith_-.chars |
|
1827 | #testcases simple case-with-dashes casewith_-.chars | |
1828 | $ echo $TESTCASE |
|
1828 | $ echo $TESTCASE | |
1829 | - simple |
|
1829 | - simple | |
1830 | + case-with-dashes |
|
1830 | + case-with-dashes | |
1831 |
|
1831 | |||
1832 | ERROR: test-cases-advanced-cases.t#case-with-dashes output changed |
|
1832 | ERROR: test-cases-advanced-cases.t#case-with-dashes output changed | |
1833 | ! |
|
1833 | ! | |
1834 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t |
|
1834 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t | |
1835 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#casewith_-.chars.err |
|
1835 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#casewith_-.chars.err | |
1836 | @@ -1,3 +1,3 @@ |
|
1836 | @@ -1,3 +1,3 @@ | |
1837 | #testcases simple case-with-dashes casewith_-.chars |
|
1837 | #testcases simple case-with-dashes casewith_-.chars | |
1838 | $ echo $TESTCASE |
|
1838 | $ echo $TESTCASE | |
1839 | - simple |
|
1839 | - simple | |
1840 | + casewith_-.chars |
|
1840 | + casewith_-.chars | |
1841 |
|
1841 | |||
1842 | ERROR: test-cases-advanced-cases.t#casewith_-.chars output changed |
|
1842 | ERROR: test-cases-advanced-cases.t#casewith_-.chars output changed | |
1843 | !. |
|
1843 | !. | |
1844 | Failed test-cases-advanced-cases.t#case-with-dashes: output changed |
|
1844 | Failed test-cases-advanced-cases.t#case-with-dashes: output changed | |
1845 | Failed test-cases-advanced-cases.t#casewith_-.chars: output changed |
|
1845 | Failed test-cases-advanced-cases.t#casewith_-.chars: output changed | |
1846 | # Ran 3 tests, 0 skipped, 2 failed. |
|
1846 | # Ran 3 tests, 0 skipped, 2 failed. | |
1847 | python hash seed: * (glob) |
|
1847 | python hash seed: * (glob) | |
1848 | [1] |
|
1848 | [1] | |
1849 |
|
1849 | |||
1850 | $ rt "test-cases-advanced-cases.t#case-with-dashes" |
|
1850 | $ rt "test-cases-advanced-cases.t#case-with-dashes" | |
1851 | running 1 tests using 1 parallel processes |
|
1851 | running 1 tests using 1 parallel processes | |
1852 |
|
1852 | |||
1853 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t |
|
1853 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t | |
1854 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#case-with-dashes.err |
|
1854 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#case-with-dashes.err | |
1855 | @@ -1,3 +1,3 @@ |
|
1855 | @@ -1,3 +1,3 @@ | |
1856 | #testcases simple case-with-dashes casewith_-.chars |
|
1856 | #testcases simple case-with-dashes casewith_-.chars | |
1857 | $ echo $TESTCASE |
|
1857 | $ echo $TESTCASE | |
1858 | - simple |
|
1858 | - simple | |
1859 | + case-with-dashes |
|
1859 | + case-with-dashes | |
1860 |
|
1860 | |||
1861 | ERROR: test-cases-advanced-cases.t#case-with-dashes output changed |
|
1861 | ERROR: test-cases-advanced-cases.t#case-with-dashes output changed | |
1862 | ! |
|
1862 | ! | |
1863 | Failed test-cases-advanced-cases.t#case-with-dashes: output changed |
|
1863 | Failed test-cases-advanced-cases.t#case-with-dashes: output changed | |
1864 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1864 | # Ran 1 tests, 0 skipped, 1 failed. | |
1865 | python hash seed: * (glob) |
|
1865 | python hash seed: * (glob) | |
1866 | [1] |
|
1866 | [1] | |
1867 |
|
1867 | |||
1868 | $ rt "test-cases-advanced-cases.t#casewith_-.chars" |
|
1868 | $ rt "test-cases-advanced-cases.t#casewith_-.chars" | |
1869 | running 1 tests using 1 parallel processes |
|
1869 | running 1 tests using 1 parallel processes | |
1870 |
|
1870 | |||
1871 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t |
|
1871 | --- $TESTTMP/anothertests/cases/test-cases-advanced-cases.t | |
1872 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#casewith_-.chars.err |
|
1872 | +++ $TESTTMP/anothertests/cases/test-cases-advanced-cases.t#casewith_-.chars.err | |
1873 | @@ -1,3 +1,3 @@ |
|
1873 | @@ -1,3 +1,3 @@ | |
1874 | #testcases simple case-with-dashes casewith_-.chars |
|
1874 | #testcases simple case-with-dashes casewith_-.chars | |
1875 | $ echo $TESTCASE |
|
1875 | $ echo $TESTCASE | |
1876 | - simple |
|
1876 | - simple | |
1877 | + casewith_-.chars |
|
1877 | + casewith_-.chars | |
1878 |
|
1878 | |||
1879 | ERROR: test-cases-advanced-cases.t#casewith_-.chars output changed |
|
1879 | ERROR: test-cases-advanced-cases.t#casewith_-.chars output changed | |
1880 | ! |
|
1880 | ! | |
1881 | Failed test-cases-advanced-cases.t#casewith_-.chars: output changed |
|
1881 | Failed test-cases-advanced-cases.t#casewith_-.chars: output changed | |
1882 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1882 | # Ran 1 tests, 0 skipped, 1 failed. | |
1883 | python hash seed: * (glob) |
|
1883 | python hash seed: * (glob) | |
1884 | [1] |
|
1884 | [1] | |
1885 |
|
1885 | |||
1886 | Test automatic pattern replacement |
|
1886 | Test automatic pattern replacement | |
1887 | ================================== |
|
1887 | ================================== | |
1888 |
|
1888 | |||
1889 | $ cat << EOF >> common-pattern.py |
|
1889 | $ cat << EOF >> common-pattern.py | |
1890 | > substitutions = [ |
|
1890 | > substitutions = [ | |
1891 | > (br'foo-(.*)\\b', |
|
1891 | > (br'foo-(.*)\\b', | |
1892 | > br'\$XXX=\\1\$'), |
|
1892 | > br'\$XXX=\\1\$'), | |
1893 | > (br'bar\\n', |
|
1893 | > (br'bar\\n', | |
1894 | > br'\$YYY$\\n'), |
|
1894 | > br'\$YYY$\\n'), | |
1895 | > ] |
|
1895 | > ] | |
1896 | > EOF |
|
1896 | > EOF | |
1897 |
|
1897 | |||
1898 | $ cat << EOF >> test-substitution.t |
|
1898 | $ cat << EOF >> test-substitution.t | |
1899 | > $ echo foo-12 |
|
1899 | > $ echo foo-12 | |
1900 | > \$XXX=12$ |
|
1900 | > \$XXX=12$ | |
1901 | > $ echo foo-42 |
|
1901 | > $ echo foo-42 | |
1902 | > \$XXX=42$ |
|
1902 | > \$XXX=42$ | |
1903 | > $ echo bar prior |
|
1903 | > $ echo bar prior | |
1904 | > bar prior |
|
1904 | > bar prior | |
1905 | > $ echo lastbar |
|
1905 | > $ echo lastbar | |
1906 | > last\$YYY$ |
|
1906 | > last\$YYY$ | |
1907 | > $ echo foo-bar foo-baz |
|
1907 | > $ echo foo-bar foo-baz | |
1908 | > EOF |
|
1908 | > EOF | |
1909 |
|
1909 | |||
1910 | $ rt test-substitution.t |
|
1910 | $ rt test-substitution.t | |
1911 | running 1 tests using 1 parallel processes |
|
1911 | running 1 tests using 1 parallel processes | |
1912 |
|
1912 | |||
1913 | --- $TESTTMP/anothertests/cases/test-substitution.t |
|
1913 | --- $TESTTMP/anothertests/cases/test-substitution.t | |
1914 | +++ $TESTTMP/anothertests/cases/test-substitution.t.err |
|
1914 | +++ $TESTTMP/anothertests/cases/test-substitution.t.err | |
1915 | @@ -7,3 +7,4 @@ |
|
1915 | @@ -7,3 +7,4 @@ | |
1916 | $ echo lastbar |
|
1916 | $ echo lastbar | |
1917 | last$YYY$ |
|
1917 | last$YYY$ | |
1918 | $ echo foo-bar foo-baz |
|
1918 | $ echo foo-bar foo-baz | |
1919 | + $XXX=bar foo-baz$ |
|
1919 | + $XXX=bar foo-baz$ | |
1920 |
|
1920 | |||
1921 | ERROR: test-substitution.t output changed |
|
1921 | ERROR: test-substitution.t output changed | |
1922 | ! |
|
1922 | ! | |
1923 | Failed test-substitution.t: output changed |
|
1923 | Failed test-substitution.t: output changed | |
1924 | # Ran 1 tests, 0 skipped, 1 failed. |
|
1924 | # Ran 1 tests, 0 skipped, 1 failed. | |
1925 | python hash seed: * (glob) |
|
1925 | python hash seed: * (glob) | |
1926 | [1] |
|
1926 | [1] | |
1927 |
|
1927 | |||
1928 | --extra-config-opt works |
|
1928 | --extra-config-opt works | |
1929 |
|
1929 | |||
1930 | $ cat << EOF >> test-config-opt.t |
|
1930 | $ cat << EOF >> test-config-opt.t | |
1931 | > $ hg init test-config-opt |
|
1931 | > $ hg init test-config-opt | |
1932 | > $ hg -R test-config-opt purge |
|
1932 | > $ hg -R test-config-opt purge | |
1933 | > EOF |
|
1933 | > EOF | |
1934 |
|
1934 | |||
1935 | $ rt --extra-config-opt extensions.purge= test-config-opt.t |
|
1935 | $ rt --extra-config-opt extensions.purge= test-config-opt.t | |
1936 | running 1 tests using 1 parallel processes |
|
1936 | running 1 tests using 1 parallel processes | |
1937 | . |
|
1937 | . | |
1938 | # Ran 1 tests, 0 skipped, 0 failed. |
|
1938 | # Ran 1 tests, 0 skipped, 0 failed. | |
1939 |
|
1939 | |||
1940 | Test conditional output matching |
|
1940 | Test conditional output matching | |
1941 | ================================ |
|
1941 | ================================ | |
1942 |
|
1942 | |||
1943 | $ cat << EOF >> test-conditional-matching.t |
|
1943 | $ cat << EOF >> test-conditional-matching.t | |
1944 | > #testcases foo bar |
|
1944 | > #testcases foo bar | |
1945 | > $ echo richtig |
|
1945 | > $ echo richtig | |
1946 | > richtig (true !) |
|
1946 | > richtig (true !) | |
1947 | > $ echo falsch |
|
1947 | > $ echo falsch | |
1948 | > falsch (false !) |
|
1948 | > falsch (false !) | |
1949 | > #if foo |
|
1949 | > #if foo | |
1950 | > $ echo arthur |
|
1950 | > $ echo arthur | |
1951 | > arthur (bar !) |
|
1951 | > arthur (bar !) | |
1952 | > #endif |
|
1952 | > #endif | |
1953 | > $ echo celeste |
|
1953 | > $ echo celeste | |
1954 | > celeste (foo !) |
|
1954 | > celeste (foo !) | |
1955 | > $ echo zephir |
|
1955 | > $ echo zephir | |
1956 | > zephir (bar !) |
|
1956 | > zephir (bar !) | |
1957 | > EOF |
|
1957 | > EOF | |
1958 |
|
1958 | |||
1959 | $ rt test-conditional-matching.t |
|
1959 | $ rt test-conditional-matching.t | |
1960 | running 2 tests using 1 parallel processes |
|
1960 | running 2 tests using 1 parallel processes | |
1961 |
|
1961 | |||
1962 | --- $TESTTMP/anothertests/cases/test-conditional-matching.t |
|
1962 | --- $TESTTMP/anothertests/cases/test-conditional-matching.t | |
1963 | +++ $TESTTMP/anothertests/cases/test-conditional-matching.t#bar.err |
|
1963 | +++ $TESTTMP/anothertests/cases/test-conditional-matching.t#bar.err | |
1964 | @@ -3,11 +3,13 @@ |
|
1964 | @@ -3,11 +3,13 @@ | |
1965 | richtig (true !) |
|
1965 | richtig (true !) | |
1966 | $ echo falsch |
|
1966 | $ echo falsch | |
1967 | falsch (false !) |
|
1967 | falsch (false !) | |
1968 | + falsch |
|
1968 | + falsch | |
1969 | #if foo |
|
1969 | #if foo | |
1970 | $ echo arthur |
|
1970 | $ echo arthur | |
1971 | arthur \(bar !\) (re) |
|
1971 | arthur \(bar !\) (re) | |
1972 | #endif |
|
1972 | #endif | |
1973 | $ echo celeste |
|
1973 | $ echo celeste | |
1974 | celeste \(foo !\) (re) |
|
1974 | celeste \(foo !\) (re) | |
1975 | + celeste |
|
1975 | + celeste | |
1976 | $ echo zephir |
|
1976 | $ echo zephir | |
1977 | zephir \(bar !\) (re) |
|
1977 | zephir \(bar !\) (re) | |
1978 |
|
1978 | |||
1979 | ERROR: test-conditional-matching.t#bar output changed |
|
1979 | ERROR: test-conditional-matching.t#bar output changed | |
1980 | ! |
|
1980 | ! | |
1981 | --- $TESTTMP/anothertests/cases/test-conditional-matching.t |
|
1981 | --- $TESTTMP/anothertests/cases/test-conditional-matching.t | |
1982 | +++ $TESTTMP/anothertests/cases/test-conditional-matching.t#foo.err |
|
1982 | +++ $TESTTMP/anothertests/cases/test-conditional-matching.t#foo.err | |
1983 | @@ -3,11 +3,14 @@ |
|
1983 | @@ -3,11 +3,14 @@ | |
1984 | richtig (true !) |
|
1984 | richtig (true !) | |
1985 | $ echo falsch |
|
1985 | $ echo falsch | |
1986 | falsch (false !) |
|
1986 | falsch (false !) | |
1987 | + falsch |
|
1987 | + falsch | |
1988 | #if foo |
|
1988 | #if foo | |
1989 | $ echo arthur |
|
1989 | $ echo arthur | |
1990 | arthur \(bar !\) (re) |
|
1990 | arthur \(bar !\) (re) | |
1991 | + arthur |
|
1991 | + arthur | |
1992 | #endif |
|
1992 | #endif | |
1993 | $ echo celeste |
|
1993 | $ echo celeste | |
1994 | celeste \(foo !\) (re) |
|
1994 | celeste \(foo !\) (re) | |
1995 | $ echo zephir |
|
1995 | $ echo zephir | |
1996 | zephir \(bar !\) (re) |
|
1996 | zephir \(bar !\) (re) | |
1997 | + zephir |
|
1997 | + zephir | |
1998 |
|
1998 | |||
1999 | ERROR: test-conditional-matching.t#foo output changed |
|
1999 | ERROR: test-conditional-matching.t#foo output changed | |
2000 | ! |
|
2000 | ! | |
2001 | Failed test-conditional-matching.t#bar: output changed |
|
2001 | Failed test-conditional-matching.t#bar: output changed | |
2002 | Failed test-conditional-matching.t#foo: output changed |
|
2002 | Failed test-conditional-matching.t#foo: output changed | |
2003 | # Ran 2 tests, 0 skipped, 2 failed. |
|
2003 | # Ran 2 tests, 0 skipped, 2 failed. | |
2004 | python hash seed: * (glob) |
|
2004 | python hash seed: * (glob) | |
2005 | [1] |
|
2005 | [1] |
General Comments 0
You need to be logged in to leave comments.
Login now