store: assert the fncache have been loaded if dirty...
Boris Feld
r38718:89d93dd1 default
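The changeset adds a single assertion in fncache.write(): a fncache that is marked dirty must already have its entries loaded, because add() and remove() both call _load() before ever setting _dirty. Below is a minimal illustrative sketch of that invariant (a simplified, hypothetical stand-in, not the Mercurial implementation; the real method appears in the diff that follows):

# Simplified sketch of the "dirty implies loaded" invariant that the new
# assertion documents; the names mirror store.py, but this class is a
# hypothetical stand-in, not the real fncache.
class _sketchcache(object):
    def __init__(self):
        self.entries = None     # loaded lazily from the 'fncache' file
        self._dirty = False

    def _load(self):
        self.entries = set()    # stand-in for reading and decoding the file

    def add(self, fn):
        if self.entries is None:
            self._load()        # every mutation loads the entries first...
        if fn not in self.entries:
            self._dirty = True  # ...so _dirty=True implies entries is not None
            self.entries.add(fn)

    def write(self):
        if self._dirty:
            # mirrors the new 'assert self.entries is not None' in store.py
            assert self.entries is not None
            self._dirty = False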
@@ -1,592 +1,593 @@
# store.py - repository store handling for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import hashlib
import os
import stat

from .i18n import _
from . import (
    error,
    node,
    policy,
    pycompat,
    util,
    vfs as vfsmod,
)

parsers = policy.importmod(r'parsers')

# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
def _encodedir(path):
    '''
    >>> _encodedir(b'data/foo.i')
    'data/foo.i'
    >>> _encodedir(b'data/foo.i/bla.i')
    'data/foo.i.hg/bla.i'
    >>> _encodedir(b'data/foo.i.hg/bla.i')
    'data/foo.i.hg.hg/bla.i'
    >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
    'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
    '''
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

encodedir = getattr(parsers, 'encodedir', _encodedir)

def decodedir(path):
    '''
    >>> decodedir(b'data/foo.i')
    'data/foo.i'
    >>> decodedir(b'data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir(b'data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))

def _reserved():
    ''' characters that are problematic for filesystems

    * ascii escapes (0..31)
    * ascii hi (126..255)
    * windows specials

    these characters will be escaped by encodefunctions
    '''
    winreserved = [ord(x) for x in u'\\:*?"<>|']
    for x in range(32):
        yield x
    for x in range(126, 256):
        yield x
    for x in winreserved:
        yield x

def _buildencodefun():
    '''
    >>> enc, dec = _buildencodefun()

    >>> enc(b'nothing/special.txt')
    'nothing/special.txt'
    >>> dec(b'nothing/special.txt')
    'nothing/special.txt'

    >>> enc(b'HELLO')
    '_h_e_l_l_o'
    >>> dec(b'_h_e_l_l_o')
    'HELLO'

    >>> enc(b'hello:world?')
    'hello~3aworld~3f'
    >>> dec(b'hello~3aworld~3f')
    'hello:world?'

    >>> enc(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    >>> dec(b'the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    xchr = pycompat.bytechr
    asciistr = list(map(xchr, range(127)))
    capitals = list(range(ord("A"), ord("Z") + 1))

    cmap = dict((x, x) for x in asciistr)
    for x in _reserved():
        cmap[xchr(x)] = "~%02x" % x
    for x in capitals + [ord(e)]:
        cmap[xchr(x)] = e + xchr(x).lower()

    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[s[c:c + 1]] for c in xrange(len(s))]),
            lambda s: ''.join(list(decode(s))))

_encodefname, _decodefname = _buildencodefun()

def encodefilename(s):
    '''
    >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    return _encodefname(encodedir(s))

def decodefilename(s):
    '''
    >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    return decodedir(_decodefname(s))

def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f(b'nothing/special.txt')
    'nothing/special.txt'
    >>> f(b'HELLO')
    'hello'
    >>> f(b'hello:world?')
    'hello~3aworld~3f'
    >>> f(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    '''
    xchr = pycompat.bytechr
    cmap = dict([(xchr(x), xchr(x)) for x in xrange(127)])
    for x in _reserved():
        cmap[xchr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z") + 1):
        cmap[xchr(x)] = xchr(x).lower()
    def lowerencode(s):
        return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
    return lowerencode

lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()

# Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
_winres4 = ('com', 'lpt')               # length 4 (with trailing 1..9)
def _auxencode(path, dotencode):
    '''
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split(b'/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split(b'/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode([b'foo. '], True)
    ['foo.~20']
    >>> _auxencode([b' .foo'], True)
    ['~20.foo']
    '''
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in '. ':
            n = "~%02x" % ord(n[0:1]) + n[1:]
            path[i] = n
        else:
            l = n.find('.')
            if l == -1:
                l = len(n)
            if ((l == 3 and n[:3] in _winres3) or
                (l == 4 and n[3:4] <= '9' and n[3:4] >= '1'
                        and n[:3] in _winres4)):
                # encode third letter ('aux' -> 'au~78')
                ec = "~%02x" % ord(n[2:3])
                n = n[0:2] + ec + n[3:]
                path[i] = n
        if n[-1] in '. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + "~%02x" % ord(n[-1:])
    return path

_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4

def _hashencode(path, dotencode):
    digest = node.hex(hashlib.sha1(path).digest())
    le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
            if t > _maxshortdirslen:
                break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if len(dirs) > 0:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res

def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    ef = _encodefname(path).split('/')
    res = '/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        res = _hashencode(path, dotencode)
    return res

def _pathencode(path):
    de = encodedir(path)
    if len(path) > _maxstorepathlen:
        return _hashencode(de, True)
    ef = _encodefname(de).split('/')
    res = '/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return _hashencode(de, True)
    return res

_pathencode = getattr(parsers, 'pathencode', _pathencode)

def _plainhybridencode(f):
    return _hybridencode(f, False)

def _calcmode(vfs):
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0o777 & ~util.umask) == (0o777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode

_data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')

def isrevlog(f, kind, st):
    return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')

class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = vfsmod.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse, filefilter=isrevlog):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if filefilter(f, kind, st):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def datafiles(self):
        return self._walk('data', True) + self._walk('meta', True)

    def topfiles(self):
        # yield manifest before changelog
        return reversed(self._walk('', False))

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        for x in self.topfiles():
            yield x

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self, tr):
        pass

    def invalidatecaches(self):
        pass

    def markremoved(self, fn):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if self.vfs.exists(path + ".i"):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return self.vfs.exists(path)

class encodedstore(basicstore):
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = vfsmod.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for a, b, size in super(encodedstore, self).datafiles():
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])

class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            fp.seek(0)
            for n, line in enumerate(util.iterfile(fp)):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %d') % (n + 1)
                    raise error.Abort(t)
        fp.close()

    def write(self, tr):
        if self._dirty:
+           assert self.entries is not None
            tr.addbackup('fncache')
            fp = self.vfs('fncache', mode='wb', atomictemp=True)
            if self.entries:
                fp.write(encodedir('\n'.join(self.entries) + '\n'))
            fp.close()
            self._dirty = False

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def remove(self, fn):
        if self.entries is None:
            self._load()
        try:
            self.entries.remove(fn)
            self._dirty = True
        except KeyError:
            pass

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)

class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
    def __init__(self, vfs, fnc, encode):
        vfsmod.proxyvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode='r', *args, **kw):
        encoded = self.encode(path)
        if mode not in ('r', 'rb') and (path.startswith('data/') or
                                        path.startswith('meta/')):
            # do not trigger a fncache load when adding a file that already is
            # known to exist.
            notload = self.fncache.entries is None and self.vfs.exists(encoded)
            if notload and 'a' in mode and not self.vfs.stat(encoded).st_size:
                # when appending to an existing file, if the file has size zero,
                # it should be considered as missing. Such zero-size files are
                # the result of truncation when a transaction is aborted.
                notload = False
            if not notload:
                self.fncache.add(path)
        return self.vfs(encoded, mode, *args, **kw)

    def join(self, path):
        if path:
            return self.vfs.join(self.encode(path))
        else:
            return self.vfs.join(path)

class fncachestore(basicstore):
    def __init__(self, path, vfstype, dotencode):
        if dotencode:
            encode = _pathencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise

    def copylist(self):
        d = ('data meta dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self, tr):
        self.fncache.write(tr)

    def invalidatecaches(self):
        self.fncache.entries = None

    def markremoved(self, fn):
        self.fncache.remove(fn)

    def _exists(self, f):
        ef = self.encode(f)
        try:
            self.getsize(ef)
            return True
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # nonexistent entry
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # check for files (exact match)
        e = path + '.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        for e in self.fncache:
            if e.startswith(path) and self._exists(e):
                return True
        return False

def store(requirements, path, vfstype):
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, vfstype, 'dotencode' in requirements)
        return encodedstore(path, vfstype)
    return basicstore(path, vfstype)
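For context, the store() factory at the bottom of the file selects the store implementation from the repository requirements: 'store' plus 'fncache' yields fncachestore (with dot-encoding if 'dotencode' is also present), 'store' alone yields encodedstore, and neither yields basicstore. A hedged usage sketch follows, assuming this era's Mercurial is importable and that mercurial.vfs.vfs is acceptable as the vfstype callable; the repository path is hypothetical:

# Illustrative only: mapping requirement sets to store classes via store.store().
from mercurial import store, vfs as vfsmod

hgpath = '/tmp/repo/.hg'   # hypothetical .hg directory

s = store.store({'store', 'fncache', 'dotencode'}, hgpath, vfsmod.vfs)
print(type(s).__name__)    # 'fncachestore'

s = store.store({'store'}, hgpath, vfsmod.vfs)
print(type(s).__name__)    # 'encodedstore'

s = store.store(set(), hgpath, vfsmod.vfs)
print(type(s).__name__)    # 'basicstore'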