##// END OF EJS Templates
fncache: avoid loading the filename cache when not actually modifying it...
Martijn Pieters -
r38683:8ac0c9cd default
parent child Browse files
Show More
@@ -1,582 +1,592 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from . import (
16 from . import (
17 error,
17 error,
18 node,
18 node,
19 policy,
19 policy,
20 pycompat,
20 pycompat,
21 util,
21 util,
22 vfs as vfsmod,
22 vfs as vfsmod,
23 )
23 )
24
24
25 parsers = policy.importmod(r'parsers')
25 parsers = policy.importmod(r'parsers')
26
26
27 # This avoids a collision between a file named foo and a dir named
27 # This avoids a collision between a file named foo and a dir named
28 # foo.i or foo.d
28 # foo.i or foo.d
29 def _encodedir(path):
29 def _encodedir(path):
30 '''
30 '''
31 >>> _encodedir(b'data/foo.i')
31 >>> _encodedir(b'data/foo.i')
32 'data/foo.i'
32 'data/foo.i'
33 >>> _encodedir(b'data/foo.i/bla.i')
33 >>> _encodedir(b'data/foo.i/bla.i')
34 'data/foo.i.hg/bla.i'
34 'data/foo.i.hg/bla.i'
35 >>> _encodedir(b'data/foo.i.hg/bla.i')
35 >>> _encodedir(b'data/foo.i.hg/bla.i')
36 'data/foo.i.hg.hg/bla.i'
36 'data/foo.i.hg.hg/bla.i'
37 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
37 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
38 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
38 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
39 '''
39 '''
40 return (path
40 return (path
41 .replace(".hg/", ".hg.hg/")
41 .replace(".hg/", ".hg.hg/")
42 .replace(".i/", ".i.hg/")
42 .replace(".i/", ".i.hg/")
43 .replace(".d/", ".d.hg/"))
43 .replace(".d/", ".d.hg/"))
44
44
# Prefer the C implementation from the parsers module when it is available.
encodedir = getattr(parsers, 'encodedir', _encodedir)
46
46
def decodedir(path):
    '''
    >>> decodedir(b'data/foo.i')
    'data/foo.i'
    >>> decodedir(b'data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir(b'data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    # Fast path: paths without any ".hg/" component need no rewriting.
    if ".hg/" not in path:
        return path
    # Undo _encodedir, most specific suffixes first.
    for encoded, plain in ((".d.hg/", ".d/"),
                           (".i.hg/", ".i/"),
                           (".hg.hg/", ".hg/")):
        path = path.replace(encoded, plain)
    return path
62
62
63 def _reserved():
63 def _reserved():
64 ''' characters that are problematic for filesystems
64 ''' characters that are problematic for filesystems
65
65
66 * ascii escapes (0..31)
66 * ascii escapes (0..31)
67 * ascii hi (126..255)
67 * ascii hi (126..255)
68 * windows specials
68 * windows specials
69
69
70 these characters will be escaped by encodefunctions
70 these characters will be escaped by encodefunctions
71 '''
71 '''
72 winreserved = [ord(x) for x in u'\\:*?"<>|']
72 winreserved = [ord(x) for x in u'\\:*?"<>|']
73 for x in range(32):
73 for x in range(32):
74 yield x
74 yield x
75 for x in range(126, 256):
75 for x in range(126, 256):
76 yield x
76 yield x
77 for x in winreserved:
77 for x in winreserved:
78 yield x
78 yield x
79
79
def _buildencodefun():
    '''
    Build the (encode, decode) pair used for the reversible filename
    encoding: uppercase letters become '_<lower>' and reserved bytes
    become '~XX'.

    >>> enc, dec = _buildencodefun()

    >>> enc(b'nothing/special.txt')
    'nothing/special.txt'
    >>> dec(b'nothing/special.txt')
    'nothing/special.txt'

    >>> enc(b'HELLO')
    '_h_e_l_l_o'
    >>> dec(b'_h_e_l_l_o')
    'HELLO'

    >>> enc(b'hello:world?')
    'hello~3aworld~3f'
    >>> dec(b'hello~3aworld~3f')
    'hello:world?'

    >>> enc(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    >>> dec(b'the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    xchr = pycompat.bytechr
    asciistr = list(map(xchr, range(127)))
    capitals = list(range(ord("A"), ord("Z") + 1))

    # default: plain ascii characters map to themselves
    cmap = dict((x, x) for x in asciistr)
    # reserved characters are escaped as ~XX
    for x in _reserved():
        cmap[xchr(x)] = "~%02x" % x
    # uppercase letters (and the escape character itself) map to _<lower>
    for x in capitals + [ord(e)]:
        cmap[xchr(x)] = e + xchr(x).lower()

    # build the reverse map for decoding; dict.iteritems() and xrange do
    # not exist on Python 3, items()/range() work on both versions
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            # encoded sequences are between 1 and 3 characters long
            for l in range(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[s[c:c + 1]] for c in range(len(s))]),
            lambda s: ''.join(list(decode(s))))
132
132
# module-level default filename codec, built once at import time
_encodefname, _decodefname = _buildencodefun()
134
134
def encodefilename(s):
    '''
    >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    # protect directory components first, then escape individual bytes
    return _encodefname(encodedir(s))
141
141
def decodefilename(s):
    '''
    >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    # exact inverse of encodefilename: unescape bytes, then directories
    return decodedir(_decodefname(s))
148
148
def _buildlowerencodefun():
    '''
    Build a one-way encoder that lowercases letters and escapes reserved
    bytes as '~XX' (used by the hashed encoding, see _hashencode).

    >>> f = _buildlowerencodefun()
    >>> f(b'nothing/special.txt')
    'nothing/special.txt'
    >>> f(b'HELLO')
    'hello'
    >>> f(b'hello:world?')
    'hello~3aworld~3f'
    >>> f(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    '''
    xchr = pycompat.bytechr
    # xrange does not exist on Python 3; range works on both versions
    cmap = dict([(xchr(x), xchr(x)) for x in range(127)])
    for x in _reserved():
        cmap[xchr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z") + 1):
        cmap[xchr(x)] = xchr(x).lower()
    def lowerencode(s):
        return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
    return lowerencode
170
170
# prefer the C implementation when present, else the pure-Python fallback
lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
172
172
173 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
173 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
174 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
174 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
175 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
175 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
def _auxencode(path, dotencode):
    '''
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split(b'/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split(b'/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode([b'foo. '], True)
    ['foo.~20']
    >>> _auxencode([b' .foo'], True)
    ['~20.foo']
    '''
    # path is a list of segments, updated in place and also returned
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in '. ':
            # escape a leading period or space ('.foo' -> '~2efoo')
            n = "~%02x" % ord(n[0:1]) + n[1:]
            path[i] = n
        else:
            # a segment is a reserved Windows name only when the part
            # before the first period is exactly such a name
            stem = n.split('.', 1)[0]
            reserved = ((len(stem) == 3 and stem in _winres3) or
                        (len(stem) == 4 and '1' <= stem[3:4] <= '9'
                         and stem[:3] in _winres4))
            if reserved:
                # mask the third letter ('aux' -> 'au~78')
                n = n[:2] + "~%02x" % ord(n[2:3]) + n[3:]
                path[i] = n
        if n[-1] in '. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + "~%02x" % ord(n[-1:])
    return path
219
219
220 _maxstorepathlen = 120
220 _maxstorepathlen = 120
221 _dirprefixlen = 8
221 _dirprefixlen = 8
222 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
222 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
223
223
def _hashencode(path, dotencode):
    '''Return the non-reversible hashed store path for path.

    The result is 'dh/' + up to _dirprefixlen characters of each directory
    level + as much of the basename as fits + sha1(path) + the original
    extension, capped at _maxstorepathlen (see _hybridencode docstring).
    '''
    digest = node.hex(hashlib.sha1(path).digest())
    le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        # total length if we add this level ('/' separator after the first)
        t = len(d) if sdirslen == 0 else sdirslen + 1 + len(d)
        if t > _maxshortdirslen:
            break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if dirs:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    # pad with as much of the basename as still fits under the limit
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res
254
254
def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    segments = _auxencode(_encodefname(path).split('/'), dotencode)
    res = '/'.join(segments)
    if len(res) > _maxstorepathlen:
        # too long for the reversible encoding: fall back to hashing
        res = _hashencode(path, dotencode)
    return res
292
292
def _pathencode(path):
    '''Pure-Python fallback for parsers.pathencode.

    Equivalent to _hybridencode(path, True), but checks the raw path
    length up front so obviously-too-long paths go straight to hashing.
    '''
    de = encodedir(path)
    if len(path) <= _maxstorepathlen:
        res = '/'.join(_auxencode(_encodefname(de).split('/'), True))
        if len(res) <= _maxstorepathlen:
            return res
    return _hashencode(de, True)
302
302
# replace the Python implementation with the C one when available
_pathencode = getattr(parsers, 'pathencode', _pathencode)
304
304
def _plainhybridencode(f):
    '''hybrid-encode f without dot-encoding (for fncache repos that lack
    the "dotencode" requirement)'''
    return _hybridencode(f, False)
307
307
def _calcmode(vfs):
    '''Return the creation mode for new files under vfs, or None.

    None means the umask-derived default is already right and the
    chmod calls can be skipped entirely.
    '''
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
    except OSError:
        return None
    # avoid some useless chmods
    if (0o777 & ~util.umask) == (0o777 & mode):
        return None
    return mode
318
318
319 _data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
319 _data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
320 ' phaseroots obsstore')
320 ' phaseroots obsstore')
321
321
def isrevlog(f, kind, st):
    '''Return True if f is a revlog file: a regular file whose name ends
    in ".i" or ".d". st is unused; it is accepted so this matches the
    filefilter signature used by basicstore._walk.'''
    if kind != stat.S_IFREG:
        return False
    return f.endswith(('.i', '.d'))
324
324
class basicstore(object):
    '''base class for local repository stores'''

    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        # the store vfs only needs directory-collision encoding
        self.vfs = vfsmod.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        # filesystem path of the (dir-encoded) file f inside the store
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse, filefilter=isrevlog):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        entries = []
        if self.rawvfs.isdir(path):
            readdir = self.rawvfs.readdir
            # iterative depth-first traversal of the store directory
            pending = [path]
            while pending:
                cur = pending.pop()
                for name, kind, st in readdir(cur, stat=True):
                    full = cur + '/' + name
                    if filefilter(name, kind, st):
                        rel = util.pconvert(full[striplen:])
                        entries.append((decodedir(rel), rel, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        pending.append(full)
        entries.sort()
        return entries

    def datafiles(self):
        return self._walk('data', True) + self._walk('meta', True)

    def topfiles(self):
        # yield manifest before changelog
        return reversed(self._walk('', False))

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for entry in self.datafiles():
            yield entry
        for entry in self.topfiles():
            yield entry

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self, tr):
        # nothing to persist for a plain store
        pass

    def invalidatecaches(self):
        pass

    def markremoved(self, fn):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if self.vfs.exists(path + ".i"):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return self.vfs.exists(path)
398
398
class encodedstore(basicstore):
    '''store using the full reversible filename encoding ("store"
    requirement without "fncache")'''

    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = vfsmod.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for unencoded, encoded, size in super(encodedstore, self).datafiles():
            try:
                unencoded = decodefilename(unencoded)
            except KeyError:
                # undecodable on-disk name: report it with None
                unencoded = None
            yield unencoded, encoded, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])
423
423
class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        # lazily populated set of tracked names; None until _load() runs
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            # an empty entry means corruption; rescan to report the line
            fp.seek(0)
            for n, line in enumerate(util.iterfile(fp)):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %d') % (n + 1)
                    raise error.Abort(t)
        fp.close()

    def write(self, tr):
        if not self._dirty:
            return
        tr.addbackup('fncache')
        fp = self.vfs('fncache', mode='wb', atomictemp=True)
        if self.entries:
            fp.write(encodedir('\n'.join(self.entries) + '\n'))
        fp.close()
        self._dirty = False

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def remove(self, fn):
        if self.entries is None:
            self._load()
        try:
            self.entries.remove(fn)
            self._dirty = True
        except KeyError:
            # removing an unknown name is a harmless no-op
            pass

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)
484
484
class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
    def __init__(self, vfs, fnc, encode):
        vfsmod.proxyvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode='r', *args, **kw):
        encoded = self.encode(path)
        writing = mode not in ('r', 'rb')
        if writing and (path.startswith('data/') or
                        path.startswith('meta/')):
            # do not trigger a fncache load when adding a file that already
            # is known to exist.
            skipadd = (self.fncache.entries is None
                       and self.vfs.exists(encoded))
            if skipadd and 'a' in mode and not self.vfs.stat(encoded).st_size:
                # when appending to an existing file, if the file has size
                # zero, it should be considered as missing. Such zero-size
                # files are the result of truncation when a transaction is
                # aborted.
                skipadd = False
            if not skipadd:
                self.fncache.add(path)
        return self.vfs(encoded, mode, *args, **kw)

    def join(self, path):
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self.encode(path))
502
512
class fncachestore(basicstore):
    '''store that tracks its contents in an fncache file ("fncache"
    requirement); filenames are hybrid/path-encoded on disk'''

    def __init__(self, path, vfstype, dotencode):
        # dotencode repos additionally escape leading '.'/' ' characters
        self.encode = _pathencode if dotencode else _plainhybridencode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, self.encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
            except OSError as err:
                # silently skip cache entries whose file has vanished
                if err.errno != errno.ENOENT:
                    raise

    def copylist(self):
        d = ('data meta dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self, tr):
        self.fncache.write(tr)

    def invalidatecaches(self):
        # force a reload from disk on next access
        self.fncache.entries = None

    def markremoved(self, fn):
        self.fncache.remove(fn)

    def _exists(self, f):
        ef = self.encode(f)
        try:
            self.getsize(ef)
            return True
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # nonexistent entry
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # check for files (exact match)
        e = path + '.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        return any(e.startswith(path) and self._exists(e)
                   for e in self.fncache)
576
586
def store(requirements, path, vfstype):
    '''Instantiate the store class matching the repository requirements.'''
    if 'store' not in requirements:
        return basicstore(path, vfstype)
    if 'fncache' in requirements:
        return fncachestore(path, vfstype, 'dotencode' in requirements)
    return encodedstore(path, vfstype)
@@ -1,438 +1,508 b''
1 #require repofncache
1 #require repofncache
2
2
3 Init repo1:
3 Init repo1:
4
4
5 $ hg init repo1
5 $ hg init repo1
6 $ cd repo1
6 $ cd repo1
7 $ echo "some text" > a
7 $ echo "some text" > a
8 $ hg add
8 $ hg add
9 adding a
9 adding a
10 $ hg ci -m first
10 $ hg ci -m first
11 $ cat .hg/store/fncache | sort
11 $ cat .hg/store/fncache | sort
12 data/a.i
12 data/a.i
13
13
14 Testing a.i/b:
14 Testing a.i/b:
15
15
16 $ mkdir a.i
16 $ mkdir a.i
17 $ echo "some other text" > a.i/b
17 $ echo "some other text" > a.i/b
18 $ hg add
18 $ hg add
19 adding a.i/b
19 adding a.i/b
20 $ hg ci -m second
20 $ hg ci -m second
21 $ cat .hg/store/fncache | sort
21 $ cat .hg/store/fncache | sort
22 data/a.i
22 data/a.i
23 data/a.i.hg/b.i
23 data/a.i.hg/b.i
24
24
25 Testing a.i.hg/c:
25 Testing a.i.hg/c:
26
26
27 $ mkdir a.i.hg
27 $ mkdir a.i.hg
28 $ echo "yet another text" > a.i.hg/c
28 $ echo "yet another text" > a.i.hg/c
29 $ hg add
29 $ hg add
30 adding a.i.hg/c
30 adding a.i.hg/c
31 $ hg ci -m third
31 $ hg ci -m third
32 $ cat .hg/store/fncache | sort
32 $ cat .hg/store/fncache | sort
33 data/a.i
33 data/a.i
34 data/a.i.hg.hg/c.i
34 data/a.i.hg.hg/c.i
35 data/a.i.hg/b.i
35 data/a.i.hg/b.i
36
36
37 Testing verify:
37 Testing verify:
38
38
39 $ hg verify
39 $ hg verify
40 checking changesets
40 checking changesets
41 checking manifests
41 checking manifests
42 crosschecking files in changesets and manifests
42 crosschecking files in changesets and manifests
43 checking files
43 checking files
44 3 files, 3 changesets, 3 total revisions
44 3 files, 3 changesets, 3 total revisions
45
45
46 $ rm .hg/store/fncache
46 $ rm .hg/store/fncache
47
47
48 $ hg verify
48 $ hg verify
49 checking changesets
49 checking changesets
50 checking manifests
50 checking manifests
51 crosschecking files in changesets and manifests
51 crosschecking files in changesets and manifests
52 checking files
52 checking files
53 warning: revlog 'data/a.i' not in fncache!
53 warning: revlog 'data/a.i' not in fncache!
54 warning: revlog 'data/a.i.hg/c.i' not in fncache!
54 warning: revlog 'data/a.i.hg/c.i' not in fncache!
55 warning: revlog 'data/a.i/b.i' not in fncache!
55 warning: revlog 'data/a.i/b.i' not in fncache!
56 3 files, 3 changesets, 3 total revisions
56 3 files, 3 changesets, 3 total revisions
57 3 warnings encountered!
57 3 warnings encountered!
58 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
58 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
59
59
60 Follow the hint to make sure it works
60 Follow the hint to make sure it works
61
61
62 $ hg debugrebuildfncache
62 $ hg debugrebuildfncache
63 adding data/a.i
63 adding data/a.i
64 adding data/a.i.hg/c.i
64 adding data/a.i.hg/c.i
65 adding data/a.i/b.i
65 adding data/a.i/b.i
66 3 items added, 0 removed from fncache
66 3 items added, 0 removed from fncache
67
67
68 $ hg verify
68 $ hg verify
69 checking changesets
69 checking changesets
70 checking manifests
70 checking manifests
71 crosschecking files in changesets and manifests
71 crosschecking files in changesets and manifests
72 checking files
72 checking files
73 3 files, 3 changesets, 3 total revisions
73 3 files, 3 changesets, 3 total revisions
74
74
75 $ cd ..
75 $ cd ..
76
76
77 Non store repo:
77 Non store repo:
78
78
79 $ hg --config format.usestore=False init foo
79 $ hg --config format.usestore=False init foo
80 $ cd foo
80 $ cd foo
81 $ mkdir tst.d
81 $ mkdir tst.d
82 $ echo foo > tst.d/foo
82 $ echo foo > tst.d/foo
83 $ hg ci -Amfoo
83 $ hg ci -Amfoo
84 adding tst.d/foo
84 adding tst.d/foo
85 $ find .hg | sort
85 $ find .hg | sort
86 .hg
86 .hg
87 .hg/00changelog.i
87 .hg/00changelog.i
88 .hg/00manifest.i
88 .hg/00manifest.i
89 .hg/cache
89 .hg/cache
90 .hg/cache/branch2-served
90 .hg/cache/branch2-served
91 .hg/cache/rbc-names-v1
91 .hg/cache/rbc-names-v1
92 .hg/cache/rbc-revs-v1
92 .hg/cache/rbc-revs-v1
93 .hg/data
93 .hg/data
94 .hg/data/tst.d.hg
94 .hg/data/tst.d.hg
95 .hg/data/tst.d.hg/foo.i
95 .hg/data/tst.d.hg/foo.i
96 .hg/dirstate
96 .hg/dirstate
97 .hg/fsmonitor.state (fsmonitor !)
97 .hg/fsmonitor.state (fsmonitor !)
98 .hg/last-message.txt
98 .hg/last-message.txt
99 .hg/phaseroots
99 .hg/phaseroots
100 .hg/requires
100 .hg/requires
101 .hg/undo
101 .hg/undo
102 .hg/undo.backup.dirstate
102 .hg/undo.backup.dirstate
103 .hg/undo.backupfiles
103 .hg/undo.backupfiles
104 .hg/undo.bookmarks
104 .hg/undo.bookmarks
105 .hg/undo.branch
105 .hg/undo.branch
106 .hg/undo.desc
106 .hg/undo.desc
107 .hg/undo.dirstate
107 .hg/undo.dirstate
108 .hg/undo.phaseroots
108 .hg/undo.phaseroots
109 $ cd ..
109 $ cd ..
110
110
111 Non fncache repo:
111 Non fncache repo:
112
112
113 $ hg --config format.usefncache=False init bar
113 $ hg --config format.usefncache=False init bar
114 $ cd bar
114 $ cd bar
115 $ mkdir tst.d
115 $ mkdir tst.d
116 $ echo foo > tst.d/Foo
116 $ echo foo > tst.d/Foo
117 $ hg ci -Amfoo
117 $ hg ci -Amfoo
118 adding tst.d/Foo
118 adding tst.d/Foo
119 $ find .hg | sort
119 $ find .hg | sort
120 .hg
120 .hg
121 .hg/00changelog.i
121 .hg/00changelog.i
122 .hg/cache
122 .hg/cache
123 .hg/cache/branch2-served
123 .hg/cache/branch2-served
124 .hg/cache/rbc-names-v1
124 .hg/cache/rbc-names-v1
125 .hg/cache/rbc-revs-v1
125 .hg/cache/rbc-revs-v1
126 .hg/dirstate
126 .hg/dirstate
127 .hg/fsmonitor.state (fsmonitor !)
127 .hg/fsmonitor.state (fsmonitor !)
128 .hg/last-message.txt
128 .hg/last-message.txt
129 .hg/requires
129 .hg/requires
130 .hg/store
130 .hg/store
131 .hg/store/00changelog.i
131 .hg/store/00changelog.i
132 .hg/store/00manifest.i
132 .hg/store/00manifest.i
133 .hg/store/data
133 .hg/store/data
134 .hg/store/data/tst.d.hg
134 .hg/store/data/tst.d.hg
135 .hg/store/data/tst.d.hg/_foo.i
135 .hg/store/data/tst.d.hg/_foo.i
136 .hg/store/phaseroots
136 .hg/store/phaseroots
137 .hg/store/undo
137 .hg/store/undo
138 .hg/store/undo.backupfiles
138 .hg/store/undo.backupfiles
139 .hg/store/undo.phaseroots
139 .hg/store/undo.phaseroots
140 .hg/undo.backup.dirstate
140 .hg/undo.backup.dirstate
141 .hg/undo.bookmarks
141 .hg/undo.bookmarks
142 .hg/undo.branch
142 .hg/undo.branch
143 .hg/undo.desc
143 .hg/undo.desc
144 .hg/undo.dirstate
144 .hg/undo.dirstate
145 $ cd ..
145 $ cd ..
146
146
147 Encoding of reserved / long paths in the store
147 Encoding of reserved / long paths in the store
148
148
149 $ hg init r2
149 $ hg init r2
150 $ cd r2
150 $ cd r2
151 $ cat <<EOF > .hg/hgrc
151 $ cat <<EOF > .hg/hgrc
152 > [ui]
152 > [ui]
153 > portablefilenames = ignore
153 > portablefilenames = ignore
154 > EOF
154 > EOF
155
155
156 $ hg import -q --bypass - <<EOF
156 $ hg import -q --bypass - <<EOF
157 > # HG changeset patch
157 > # HG changeset patch
158 > # User test
158 > # User test
159 > # Date 0 0
159 > # Date 0 0
160 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
160 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
161 > # Parent 0000000000000000000000000000000000000000
161 > # Parent 0000000000000000000000000000000000000000
162 > 1
162 > 1
163 >
163 >
164 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
164 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
165 > new file mode 100644
165 > new file mode 100644
166 > --- /dev/null
166 > --- /dev/null
167 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
167 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
168 > @@ -0,0 +1,1 @@
168 > @@ -0,0 +1,1 @@
169 > +foo
169 > +foo
170 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
170 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
171 > new file mode 100644
171 > new file mode 100644
172 > --- /dev/null
172 > --- /dev/null
173 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
173 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
174 > @@ -0,0 +1,1 @@
174 > @@ -0,0 +1,1 @@
175 > +foo
175 > +foo
176 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
176 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
177 > new file mode 100644
177 > new file mode 100644
178 > --- /dev/null
178 > --- /dev/null
179 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
179 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
180 > @@ -0,0 +1,1 @@
180 > @@ -0,0 +1,1 @@
181 > +foo
181 > +foo
182 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
182 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
183 > new file mode 100644
183 > new file mode 100644
184 > --- /dev/null
184 > --- /dev/null
185 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
185 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
186 > @@ -0,0 +1,1 @@
186 > @@ -0,0 +1,1 @@
187 > +foo
187 > +foo
188 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
188 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
189 > new file mode 100644
189 > new file mode 100644
190 > --- /dev/null
190 > --- /dev/null
191 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
191 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
192 > @@ -0,0 +1,1 @@
192 > @@ -0,0 +1,1 @@
193 > +foo
193 > +foo
194 > EOF
194 > EOF
195
195
196 $ find .hg/store -name *.i | sort
196 $ find .hg/store -name *.i | sort
197 .hg/store/00changelog.i
197 .hg/store/00changelog.i
198 .hg/store/00manifest.i
198 .hg/store/00manifest.i
199 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
199 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
200 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
200 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
201 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
201 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
202 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
202 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
203 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
203 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
204
204
205 $ cd ..
205 $ cd ..
206
206
207 Aborting lock does not prevent fncache writes
207 Aborting lock does not prevent fncache writes
208
208
209 $ cat > exceptionext.py <<EOF
209 $ cat > exceptionext.py <<EOF
210 > from __future__ import absolute_import
210 > from __future__ import absolute_import
211 > import os
211 > import os
212 > from mercurial import commands, error, extensions
212 > from mercurial import commands, error, extensions
213 >
213 >
214 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
214 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
215 > def releasewrap():
215 > def releasewrap():
216 > l.held = False # ensure __del__ is a noop
216 > l.held = False # ensure __del__ is a noop
217 > raise error.Abort("forced lock failure")
217 > raise error.Abort("forced lock failure")
218 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
218 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
219 > return l
219 > return l
220 >
220 >
221 > def reposetup(ui, repo):
221 > def reposetup(ui, repo):
222 > extensions.wrapfunction(repo, '_lock', lockexception)
222 > extensions.wrapfunction(repo, '_lock', lockexception)
223 >
223 >
224 > cmdtable = {}
224 > cmdtable = {}
225 >
225 >
226 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
226 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
227 > # at the end of dispatching (for intentional "forced lcok failure")
227 > # at the end of dispatching (for intentional "forced lcok failure")
228 > def commitwrap(orig, ui, repo, *pats, **opts):
228 > def commitwrap(orig, ui, repo, *pats, **opts):
229 > repo = repo.unfiltered() # to use replaced repo._lock certainly
229 > repo = repo.unfiltered() # to use replaced repo._lock certainly
230 > wlock = repo.wlock()
230 > wlock = repo.wlock()
231 > try:
231 > try:
232 > return orig(ui, repo, *pats, **opts)
232 > return orig(ui, repo, *pats, **opts)
233 > finally:
233 > finally:
234 > # multiple 'relase()' is needed for complete releasing wlock,
234 > # multiple 'relase()' is needed for complete releasing wlock,
235 > # because "forced" abort at last releasing store lock
235 > # because "forced" abort at last releasing store lock
236 > # prevents wlock from being released at same 'lockmod.release()'
236 > # prevents wlock from being released at same 'lockmod.release()'
237 > for i in range(wlock.held):
237 > for i in range(wlock.held):
238 > wlock.release()
238 > wlock.release()
239 >
239 >
240 > def extsetup(ui):
240 > def extsetup(ui):
241 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
241 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
242 > EOF
242 > EOF
243 $ extpath=`pwd`/exceptionext.py
243 $ extpath=`pwd`/exceptionext.py
244 $ hg init fncachetxn
244 $ hg init fncachetxn
245 $ cd fncachetxn
245 $ cd fncachetxn
246 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
246 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
247 $ touch y
247 $ touch y
248 $ hg ci -qAm y
248 $ hg ci -qAm y
249 abort: forced lock failure
249 abort: forced lock failure
250 [255]
250 [255]
251 $ cat .hg/store/fncache
251 $ cat .hg/store/fncache
252 data/y.i
252 data/y.i
253
253
254 Aborting transaction prevents fncache change
254 Aborting transaction prevents fncache change
255
255
256 $ cat > ../exceptionext.py <<EOF
256 $ cat > ../exceptionext.py <<EOF
257 > from __future__ import absolute_import
257 > from __future__ import absolute_import
258 > import os
258 > import os
259 > from mercurial import commands, error, extensions, localrepo
259 > from mercurial import commands, error, extensions, localrepo
260 >
260 >
261 > def wrapper(orig, self, *args, **kwargs):
261 > def wrapper(orig, self, *args, **kwargs):
262 > tr = orig(self, *args, **kwargs)
262 > tr = orig(self, *args, **kwargs)
263 > def fail(tr):
263 > def fail(tr):
264 > raise error.Abort(b"forced transaction failure")
264 > raise error.Abort(b"forced transaction failure")
265 > # zzz prefix to ensure it sorted after store.write
265 > # zzz prefix to ensure it sorted after store.write
266 > tr.addfinalize(b'zzz-forcefails', fail)
266 > tr.addfinalize(b'zzz-forcefails', fail)
267 > return tr
267 > return tr
268 >
268 >
269 > def uisetup(ui):
269 > def uisetup(ui):
270 > extensions.wrapfunction(
270 > extensions.wrapfunction(
271 > localrepo.localrepository, b'transaction', wrapper)
271 > localrepo.localrepository, b'transaction', wrapper)
272 >
272 >
273 > cmdtable = {}
273 > cmdtable = {}
274 >
274 >
275 > EOF
275 > EOF
276
276
277 Clean cached version
277 Clean cached version
278 $ rm -f "${extpath}c"
278 $ rm -f "${extpath}c"
279 $ rm -Rf "`dirname $extpath`/__pycache__"
279 $ rm -Rf "`dirname $extpath`/__pycache__"
280
280
281 $ touch z
281 $ touch z
282 $ hg ci -qAm z
282 $ hg ci -qAm z
283 transaction abort!
283 transaction abort!
284 rollback completed
284 rollback completed
285 abort: forced transaction failure
285 abort: forced transaction failure
286 [255]
286 [255]
287 $ cat .hg/store/fncache
287 $ cat .hg/store/fncache
288 data/y.i
288 data/y.i
289
289
290 Aborted transactions can be recovered later
290 Aborted transactions can be recovered later
291
291
292 $ cat > ../exceptionext.py <<EOF
292 $ cat > ../exceptionext.py <<EOF
293 > from __future__ import absolute_import
293 > from __future__ import absolute_import
294 > import os
294 > import os
295 > from mercurial import (
295 > from mercurial import (
296 > commands,
296 > commands,
297 > error,
297 > error,
298 > extensions,
298 > extensions,
299 > localrepo,
299 > localrepo,
300 > transaction,
300 > transaction,
301 > )
301 > )
302 >
302 >
303 > def trwrapper(orig, self, *args, **kwargs):
303 > def trwrapper(orig, self, *args, **kwargs):
304 > tr = orig(self, *args, **kwargs)
304 > tr = orig(self, *args, **kwargs)
305 > def fail(tr):
305 > def fail(tr):
306 > raise error.Abort("forced transaction failure")
306 > raise error.Abort("forced transaction failure")
307 > # zzz prefix to ensure it sorted after store.write
307 > # zzz prefix to ensure it sorted after store.write
308 > tr.addfinalize('zzz-forcefails', fail)
308 > tr.addfinalize('zzz-forcefails', fail)
309 > return tr
309 > return tr
310 >
310 >
311 > def abortwrapper(orig, self, *args, **kwargs):
311 > def abortwrapper(orig, self, *args, **kwargs):
312 > raise error.Abort("forced transaction failure")
312 > raise error.Abort("forced transaction failure")
313 >
313 >
314 > def uisetup(ui):
314 > def uisetup(ui):
315 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
315 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
316 > trwrapper)
316 > trwrapper)
317 > extensions.wrapfunction(transaction.transaction, '_abort',
317 > extensions.wrapfunction(transaction.transaction, '_abort',
318 > abortwrapper)
318 > abortwrapper)
319 >
319 >
320 > cmdtable = {}
320 > cmdtable = {}
321 >
321 >
322 > EOF
322 > EOF
323
323
324 Clean cached versions
324 Clean cached versions
325 $ rm -f "${extpath}c"
325 $ rm -f "${extpath}c"
326 $ rm -Rf "`dirname $extpath`/__pycache__"
326 $ rm -Rf "`dirname $extpath`/__pycache__"
327
327
328 $ hg up -q 1
328 $ hg up -q 1
329 $ touch z
329 $ touch z
330 $ hg ci -qAm z 2>/dev/null
330 $ hg ci -qAm z 2>/dev/null
331 [255]
331 [255]
332 $ cat .hg/store/fncache | sort
332 $ cat .hg/store/fncache | sort
333 data/y.i
333 data/y.i
334 data/z.i
334 data/z.i
335 $ hg recover
335 $ hg recover
336 rolling back interrupted transaction
336 rolling back interrupted transaction
337 checking changesets
337 checking changesets
338 checking manifests
338 checking manifests
339 crosschecking files in changesets and manifests
339 crosschecking files in changesets and manifests
340 checking files
340 checking files
341 1 files, 1 changesets, 1 total revisions
341 1 files, 1 changesets, 1 total revisions
342 $ cat .hg/store/fncache
342 $ cat .hg/store/fncache
343 data/y.i
343 data/y.i
344
344
345 $ cd ..
345 $ cd ..
346
346
347 debugrebuildfncache does nothing unless repo has fncache requirement
347 debugrebuildfncache does nothing unless repo has fncache requirement
348
348
349 $ hg --config format.usefncache=false init nofncache
349 $ hg --config format.usefncache=false init nofncache
350 $ cd nofncache
350 $ cd nofncache
351 $ hg debugrebuildfncache
351 $ hg debugrebuildfncache
352 (not rebuilding fncache because repository does not support fncache)
352 (not rebuilding fncache because repository does not support fncache)
353
353
354 $ cd ..
354 $ cd ..
355
355
356 debugrebuildfncache works on empty repository
356 debugrebuildfncache works on empty repository
357
357
358 $ hg init empty
358 $ hg init empty
359 $ cd empty
359 $ cd empty
360 $ hg debugrebuildfncache
360 $ hg debugrebuildfncache
361 fncache already up to date
361 fncache already up to date
362 $ cd ..
362 $ cd ..
363
363
364 debugrebuildfncache on an up to date repository no-ops
364 debugrebuildfncache on an up to date repository no-ops
365
365
366 $ hg init repo
366 $ hg init repo
367 $ cd repo
367 $ cd repo
368 $ echo initial > foo
368 $ echo initial > foo
369 $ echo initial > .bar
369 $ echo initial > .bar
370 $ hg commit -A -m initial
370 $ hg commit -A -m initial
371 adding .bar
371 adding .bar
372 adding foo
372 adding foo
373
373
374 $ cat .hg/store/fncache | sort
374 $ cat .hg/store/fncache | sort
375 data/.bar.i
375 data/.bar.i
376 data/foo.i
376 data/foo.i
377
377
378 $ hg debugrebuildfncache
378 $ hg debugrebuildfncache
379 fncache already up to date
379 fncache already up to date
380
380
381 debugrebuildfncache restores deleted fncache file
381 debugrebuildfncache restores deleted fncache file
382
382
383 $ rm -f .hg/store/fncache
383 $ rm -f .hg/store/fncache
384 $ hg debugrebuildfncache
384 $ hg debugrebuildfncache
385 adding data/.bar.i
385 adding data/.bar.i
386 adding data/foo.i
386 adding data/foo.i
387 2 items added, 0 removed from fncache
387 2 items added, 0 removed from fncache
388
388
389 $ cat .hg/store/fncache | sort
389 $ cat .hg/store/fncache | sort
390 data/.bar.i
390 data/.bar.i
391 data/foo.i
391 data/foo.i
392
392
393 Rebuild after rebuild should no-op
393 Rebuild after rebuild should no-op
394
394
395 $ hg debugrebuildfncache
395 $ hg debugrebuildfncache
396 fncache already up to date
396 fncache already up to date
397
397
398 A single missing file should get restored, an extra file should be removed
398 A single missing file should get restored, an extra file should be removed
399
399
400 $ cat > .hg/store/fncache << EOF
400 $ cat > .hg/store/fncache << EOF
401 > data/foo.i
401 > data/foo.i
402 > data/bad-entry.i
402 > data/bad-entry.i
403 > EOF
403 > EOF
404
404
405 $ hg debugrebuildfncache
405 $ hg debugrebuildfncache
406 removing data/bad-entry.i
406 removing data/bad-entry.i
407 adding data/.bar.i
407 adding data/.bar.i
408 1 items added, 1 removed from fncache
408 1 items added, 1 removed from fncache
409
409
410 $ cat .hg/store/fncache | sort
410 $ cat .hg/store/fncache | sort
411 data/.bar.i
411 data/.bar.i
412 data/foo.i
412 data/foo.i
413
413
414 $ cd ..
414 $ cd ..
415
415
416 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
416 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
417
417
418 $ hg --config format.dotencode=false init nodotencode
418 $ hg --config format.dotencode=false init nodotencode
419 $ cd nodotencode
419 $ cd nodotencode
420 $ echo initial > foo
420 $ echo initial > foo
421 $ echo initial > .bar
421 $ echo initial > .bar
422 $ hg commit -A -m initial
422 $ hg commit -A -m initial
423 adding .bar
423 adding .bar
424 adding foo
424 adding foo
425
425
426 $ cat .hg/store/fncache | sort
426 $ cat .hg/store/fncache | sort
427 data/.bar.i
427 data/.bar.i
428 data/foo.i
428 data/foo.i
429
429
430 $ rm .hg/store/fncache
430 $ rm .hg/store/fncache
431 $ hg debugrebuildfncache
431 $ hg debugrebuildfncache
432 adding data/.bar.i
432 adding data/.bar.i
433 adding data/foo.i
433 adding data/foo.i
434 2 items added, 0 removed from fncache
434 2 items added, 0 removed from fncache
435
435
436 $ cat .hg/store/fncache | sort
436 $ cat .hg/store/fncache | sort
437 data/.bar.i
437 data/.bar.i
438 data/foo.i
438 data/foo.i
439
440 $ cd ..
441
442 In repositories that have accumulated a large number of files over time, the
443 fncache file is going to be large. If we possibly can avoid loading it, so much the better.
444 The cache should not loaded when committing changes to existing files, or when unbundling
445 changesets that only contain changes to existing files:
446
447 $ cat > fncacheloadwarn.py << EOF
448 > from __future__ import absolute_import
449 > from mercurial import extensions, store
450 >
451 > def extsetup(ui):
452 > def wrapstore(orig, requirements, *args):
453 > store = orig(requirements, *args)
454 > if 'store' in requirements and 'fncache' in requirements:
455 > instrumentfncachestore(store, ui)
456 > return store
457 > extensions.wrapfunction(store, 'store', wrapstore)
458 >
459 > def instrumentfncachestore(fncachestore, ui):
460 > class instrumentedfncache(type(fncachestore.fncache)):
461 > def _load(self):
462 > ui.warn('fncache load triggered!\n')
463 > super(instrumentedfncache, self)._load()
464 > fncachestore.fncache.__class__ = instrumentedfncache
465 > EOF
466
467 $ fncachextpath=`pwd`/fncacheloadwarn.py
468 $ hg init nofncacheload
469 $ cd nofncacheload
470 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
471
472 A new file should trigger a load, as we'd want to update the fncache set in that case:
473
474 $ touch foo
475 $ hg ci -qAm foo
476 fncache load triggered!
477
478 But modifying that file should not:
479
480 $ echo bar >> foo
481 $ hg ci -qm foo
482
483 If a transaction has been aborted, the zero-size truncated index file will
484 not prevent the fncache from being loaded; rather than actually abort
485 a transaction, we simulate the situation by creating a zero-size index file:
486
487 $ touch .hg/store/data/bar.i
488 $ touch bar
489 $ hg ci -qAm bar
490 fncache load triggered!
491
492 Unbundling should follow the same rules; existing files should not cause a load:
493
494 $ hg clone -q . tobundle
495 $ echo 'new line' > tobundle/bar
496 $ hg -R tobundle ci -qm bar
497 $ hg -R tobundle bundle -q barupdated.hg
498 $ hg unbundle -q barupdated.hg
499
500 but adding new files should:
501
502 $ touch tobundle/newfile
503 $ hg -R tobundle ci -qAm newfile
504 $ hg -R tobundle bundle -q newfile.hg
505 $ hg unbundle -q newfile.hg
506 fncache load triggered!
507
508 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now