store: write fncache only once if there are both adds and removes...
Pulkit Goyal
r40779:df8ed31a default
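The gist of the change: fncache.write() previously did a full rewrite when entries had been removed and then a second, append-only write for any pending additions. After this revision the pending additions are folded into the entry set before the rewrite, so one write covers both. A minimal standalone sketch of the pattern, using a hypothetical PendingNames class with rewrite_file/append_file callbacks standing in for Mercurial's transaction and vfs machinery (not the actual API):

class PendingNames(object):
    """Toy model of the fncache bookkeeping: 'entries' mirrors what is on
    disk, 'addls' holds names added since the last write, and '_dirty' is
    set when a name was removed and a full rewrite is required."""

    def __init__(self):
        self.entries = set()
        self.addls = set()
        self._dirty = False

    def write(self, rewrite_file, append_file):
        if self._dirty:
            # fold pending additions into the full set so the rewrite
            # below is the only write needed (the point of this change)
            self.entries |= self.addls
            self.addls = set()
            rewrite_file(sorted(self.entries))
            self._dirty = False
        if self.addls:
            # nothing was removed: a cheap append is enough
            append_file(sorted(self.addls))
            self.entries |= self.addls
            self.addls = set()

In the real code both branches call tr.addbackup('fncache') and write through self.vfs; the sketch only shows the control flow that avoids the duplicate write.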
@@ -1,627 +1,629 @@
1 # store.py - repository store handling for Mercurial
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
7
8 from __future__ import absolute_import
9
10 import errno
11 import hashlib
12 import os
13 import stat
14
15 from .i18n import _
16 from . import (
17     error,
18     node,
19     policy,
20     pycompat,
21     util,
22     vfs as vfsmod,
23 )
24
25 parsers = policy.importmod(r'parsers')
26
27 def _matchtrackedpath(path, matcher):
28     """parses a fncache entry and returns whether the entry is tracking a path
29     matched by matcher or not.
30
31     If matcher is None, returns True"""
32
33     if matcher is None:
34         return True
35     path = decodedir(path)
36     if path.startswith('data/'):
37         return matcher(path[len('data/'):-len('.i')])
38     elif path.startswith('meta/'):
39         return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')] or '.')
40
41     raise error.ProgrammingError("cannot decode path %s" % path)
42
43 # This avoids a collision between a file named foo and a dir named
44 # foo.i or foo.d
45 def _encodedir(path):
46     '''
47     >>> _encodedir(b'data/foo.i')
48     'data/foo.i'
49     >>> _encodedir(b'data/foo.i/bla.i')
50     'data/foo.i.hg/bla.i'
51     >>> _encodedir(b'data/foo.i.hg/bla.i')
52     'data/foo.i.hg.hg/bla.i'
53     >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
54     'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
55     '''
56     return (path
57             .replace(".hg/", ".hg.hg/")
58             .replace(".i/", ".i.hg/")
59             .replace(".d/", ".d.hg/"))
60
61 encodedir = getattr(parsers, 'encodedir', _encodedir)
62
63 def decodedir(path):
64     '''
65     >>> decodedir(b'data/foo.i')
66     'data/foo.i'
67     >>> decodedir(b'data/foo.i.hg/bla.i')
68     'data/foo.i/bla.i'
69     >>> decodedir(b'data/foo.i.hg.hg/bla.i')
70     'data/foo.i.hg/bla.i'
71     '''
72     if ".hg/" not in path:
73         return path
74     return (path
75             .replace(".d.hg/", ".d/")
76             .replace(".i.hg/", ".i/")
77             .replace(".hg.hg/", ".hg/"))
78
79 def _reserved():
80     ''' characters that are problematic for filesystems
81
82     * ascii escapes (0..31)
83     * ascii hi (126..255)
84     * windows specials
85
86     these characters will be escaped by encodefunctions
87     '''
88     winreserved = [ord(x) for x in u'\\:*?"<>|']
89     for x in range(32):
90         yield x
91     for x in range(126, 256):
92         yield x
93     for x in winreserved:
94         yield x
95
96 def _buildencodefun():
97     '''
98     >>> enc, dec = _buildencodefun()
99
100     >>> enc(b'nothing/special.txt')
101     'nothing/special.txt'
102     >>> dec(b'nothing/special.txt')
103     'nothing/special.txt'
104
105     >>> enc(b'HELLO')
106     '_h_e_l_l_o'
107     >>> dec(b'_h_e_l_l_o')
108     'HELLO'
109
110     >>> enc(b'hello:world?')
111     'hello~3aworld~3f'
112     >>> dec(b'hello~3aworld~3f')
113     'hello:world?'
114
115     >>> enc(b'the\\x07quick\\xADshot')
116     'the~07quick~adshot'
117     >>> dec(b'the~07quick~adshot')
118     'the\\x07quick\\xadshot'
119     '''
120     e = '_'
121     xchr = pycompat.bytechr
122     asciistr = list(map(xchr, range(127)))
123     capitals = list(range(ord("A"), ord("Z") + 1))
124
125     cmap = dict((x, x) for x in asciistr)
126     for x in _reserved():
127         cmap[xchr(x)] = "~%02x" % x
128     for x in capitals + [ord(e)]:
129         cmap[xchr(x)] = e + xchr(x).lower()
130
131     dmap = {}
132     for k, v in cmap.iteritems():
133         dmap[v] = k
134     def decode(s):
135         i = 0
136         while i < len(s):
137             for l in pycompat.xrange(1, 4):
138                 try:
139                     yield dmap[s[i:i + l]]
140                     i += l
141                     break
142                 except KeyError:
143                     pass
144             else:
145                 raise KeyError
146     return (lambda s: ''.join([cmap[s[c:c + 1]]
147                                for c in pycompat.xrange(len(s))]),
148             lambda s: ''.join(list(decode(s))))
149
150 _encodefname, _decodefname = _buildencodefun()
151
152 def encodefilename(s):
153     '''
154     >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
155     'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
156     '''
157     return _encodefname(encodedir(s))
158
159 def decodefilename(s):
160     '''
161     >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
162     'foo.i/bar.d/bla.hg/hi:world?/HELLO'
163     '''
164     return decodedir(_decodefname(s))
165
166 def _buildlowerencodefun():
167     '''
168     >>> f = _buildlowerencodefun()
169     >>> f(b'nothing/special.txt')
170     'nothing/special.txt'
171     >>> f(b'HELLO')
172     'hello'
173     >>> f(b'hello:world?')
174     'hello~3aworld~3f'
175     >>> f(b'the\\x07quick\\xADshot')
176     'the~07quick~adshot'
177     '''
178     xchr = pycompat.bytechr
179     cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
180     for x in _reserved():
181         cmap[xchr(x)] = "~%02x" % x
182     for x in range(ord("A"), ord("Z") + 1):
183         cmap[xchr(x)] = xchr(x).lower()
184     def lowerencode(s):
185         return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
186     return lowerencode
187
188 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
189
190 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
191 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
192 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
193 def _auxencode(path, dotencode):
194     '''
195     Encodes filenames containing names reserved by Windows or which end in
196     period or space. Does not touch other single reserved characters c.
197     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
198     Additionally encodes space or period at the beginning, if dotencode is
199     True. Parameter path is assumed to be all lowercase.
200     A segment only needs encoding if a reserved name appears as a
201     basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
202     doesn't need encoding.
203
204     >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
205     >>> _auxencode(s.split(b'/'), True)
206     ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
207     >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
208     >>> _auxencode(s.split(b'/'), False)
209     ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
210     >>> _auxencode([b'foo. '], True)
211     ['foo.~20']
212     >>> _auxencode([b' .foo'], True)
213     ['~20.foo']
214     '''
215     for i, n in enumerate(path):
216         if not n:
217             continue
218         if dotencode and n[0] in '. ':
219             n = "~%02x" % ord(n[0:1]) + n[1:]
220             path[i] = n
221         else:
222             l = n.find('.')
223             if l == -1:
224                 l = len(n)
225             if ((l == 3 and n[:3] in _winres3) or
226                 (l == 4 and n[3:4] <= '9' and n[3:4] >= '1'
227                  and n[:3] in _winres4)):
228                 # encode third letter ('aux' -> 'au~78')
229                 ec = "~%02x" % ord(n[2:3])
230                 n = n[0:2] + ec + n[3:]
231                 path[i] = n
232         if n[-1] in '. ':
233             # encode last period or space ('foo...' -> 'foo..~2e')
234             path[i] = n[:-1] + "~%02x" % ord(n[-1:])
235     return path
236
237 _maxstorepathlen = 120
238 _dirprefixlen = 8
239 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
240
241 def _hashencode(path, dotencode):
242     digest = node.hex(hashlib.sha1(path).digest())
243     le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
244     parts = _auxencode(le, dotencode)
245     basename = parts[-1]
246     _root, ext = os.path.splitext(basename)
247     sdirs = []
248     sdirslen = 0
249     for p in parts[:-1]:
250         d = p[:_dirprefixlen]
251         if d[-1] in '. ':
252             # Windows can't access dirs ending in period or space
253             d = d[:-1] + '_'
254         if sdirslen == 0:
255             t = len(d)
256         else:
257             t = sdirslen + 1 + len(d)
258             if t > _maxshortdirslen:
259                 break
260         sdirs.append(d)
261         sdirslen = t
262     dirs = '/'.join(sdirs)
263     if len(dirs) > 0:
264         dirs += '/'
265     res = 'dh/' + dirs + digest + ext
266     spaceleft = _maxstorepathlen - len(res)
267     if spaceleft > 0:
268         filler = basename[:spaceleft]
269         res = 'dh/' + dirs + filler + digest + ext
270     return res
271
272 def _hybridencode(path, dotencode):
273     '''encodes path with a length limit
274
275     Encodes all paths that begin with 'data/', according to the following.
276
277     Default encoding (reversible):
278
279     Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
280     characters are encoded as '~xx', where xx is the two digit hex code
281     of the character (see encodefilename).
282     Relevant path components consisting of Windows reserved filenames are
283     masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
284
285     Hashed encoding (not reversible):
286
287     If the default-encoded path is longer than _maxstorepathlen, a
288     non-reversible hybrid hashing of the path is done instead.
289     This encoding uses up to _dirprefixlen characters of all directory
290     levels of the lowerencoded path, but not more levels than can fit into
291     _maxshortdirslen.
292     Then follows the filler followed by the sha digest of the full path.
293     The filler is the beginning of the basename of the lowerencoded path
294     (the basename is everything after the last path separator). The filler
295     is as long as possible, filling in characters from the basename until
296     the encoded path has _maxstorepathlen characters (or all chars of the
297     basename have been taken).
298     The extension (e.g. '.i' or '.d') is preserved.
299
300     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
301     encoding was used.
302     '''
303     path = encodedir(path)
304     ef = _encodefname(path).split('/')
305     res = '/'.join(_auxencode(ef, dotencode))
306     if len(res) > _maxstorepathlen:
307         res = _hashencode(path, dotencode)
308     return res
309
310 def _pathencode(path):
311     de = encodedir(path)
312     if len(path) > _maxstorepathlen:
313         return _hashencode(de, True)
314     ef = _encodefname(de).split('/')
315     res = '/'.join(_auxencode(ef, True))
316     if len(res) > _maxstorepathlen:
317         return _hashencode(de, True)
318     return res
319
320 _pathencode = getattr(parsers, 'pathencode', _pathencode)
321
322 def _plainhybridencode(f):
323     return _hybridencode(f, False)
324
325 def _calcmode(vfs):
326     try:
327         # files in .hg/ will be created using this mode
328         mode = vfs.stat().st_mode
329         # avoid some useless chmods
330         if (0o777 & ~util.umask) == (0o777 & mode):
331             mode = None
332     except OSError:
333         mode = None
334     return mode
335
336 _data = ('narrowspec data meta 00manifest.d 00manifest.i'
337          ' 00changelog.d 00changelog.i phaseroots obsstore')
338
339 def isrevlog(f, kind, st):
340     return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')
341
342 class basicstore(object):
343     '''base class for local repository stores'''
344     def __init__(self, path, vfstype):
345         vfs = vfstype(path)
346         self.path = vfs.base
347         self.createmode = _calcmode(vfs)
348         vfs.createmode = self.createmode
349         self.rawvfs = vfs
350         self.vfs = vfsmod.filtervfs(vfs, encodedir)
351         self.opener = self.vfs
352
353     def join(self, f):
354         return self.path + '/' + encodedir(f)
355
356     def _walk(self, relpath, recurse, filefilter=isrevlog):
357         '''yields (unencoded, encoded, size)'''
358         path = self.path
359         if relpath:
360             path += '/' + relpath
361         striplen = len(self.path) + 1
362         l = []
363         if self.rawvfs.isdir(path):
364             visit = [path]
365             readdir = self.rawvfs.readdir
366             while visit:
367                 p = visit.pop()
368                 for f, kind, st in readdir(p, stat=True):
369                     fp = p + '/' + f
370                     if filefilter(f, kind, st):
371                         n = util.pconvert(fp[striplen:])
372                         l.append((decodedir(n), n, st.st_size))
373                     elif kind == stat.S_IFDIR and recurse:
374                         visit.append(fp)
375         l.sort()
376         return l
377
378     def datafiles(self, matcher=None):
379         return self._walk('data', True) + self._walk('meta', True)
380
381     def topfiles(self):
382         # yield manifest before changelog
383         return reversed(self._walk('', False))
384
385     def walk(self, matcher=None):
386         '''yields (unencoded, encoded, size)
387
388         if a matcher is passed, storage files of only those tracked paths
389         are passed with matches the matcher
390         '''
391         # yield data files first
392         for x in self.datafiles(matcher):
393             yield x
394         for x in self.topfiles():
395             yield x
396
397     def copylist(self):
398         return ['requires'] + _data.split()
399
400     def write(self, tr):
401         pass
402
403     def invalidatecaches(self):
404         pass
405
406     def markremoved(self, fn):
407         pass
408
409     def __contains__(self, path):
410         '''Checks if the store contains path'''
411         path = "/".join(("data", path))
412         # file?
413         if self.vfs.exists(path + ".i"):
414             return True
415         # dir?
416         if not path.endswith("/"):
417             path = path + "/"
418         return self.vfs.exists(path)
419
420 class encodedstore(basicstore):
421     def __init__(self, path, vfstype):
422         vfs = vfstype(path + '/store')
423         self.path = vfs.base
424         self.createmode = _calcmode(vfs)
425         vfs.createmode = self.createmode
426         self.rawvfs = vfs
427         self.vfs = vfsmod.filtervfs(vfs, encodefilename)
428         self.opener = self.vfs
429
430     def datafiles(self, matcher=None):
431         for a, b, size in super(encodedstore, self).datafiles():
432             try:
433                 a = decodefilename(a)
434             except KeyError:
435                 a = None
436             if a is not None and not _matchtrackedpath(a, matcher):
437                 continue
438             yield a, b, size
439
440     def join(self, f):
441         return self.path + '/' + encodefilename(f)
442
443     def copylist(self):
444         return (['requires', '00changelog.i'] +
445                 ['store/' + f for f in _data.split()])
446
447 class fncache(object):
448     # the filename used to be partially encoded
449     # hence the encodedir/decodedir dance
450     def __init__(self, vfs):
451         self.vfs = vfs
452         self.entries = None
453         self._dirty = False
454         # set of new additions to fncache
455         self.addls = set()
456
457     def _load(self):
458         '''fill the entries from the fncache file'''
459         self._dirty = False
460         try:
461             fp = self.vfs('fncache', mode='rb')
462         except IOError:
463             # skip nonexistent file
464             self.entries = set()
465             return
466         self.entries = set(decodedir(fp.read()).splitlines())
467         if '' in self.entries:
468             fp.seek(0)
469             for n, line in enumerate(util.iterfile(fp)):
470                 if not line.rstrip('\n'):
471                     t = _('invalid entry in fncache, line %d') % (n + 1)
472                     raise error.Abort(t)
473         fp.close()
474
475     def write(self, tr):
476         if self._dirty:
477             assert self.entries is not None
478             self.entries = self.entries | self.addls
479             self.addls = set()
480             tr.addbackup('fncache')
481             fp = self.vfs('fncache', mode='wb', atomictemp=True)
482             if self.entries:
483                 fp.write(encodedir('\n'.join(self.entries) + '\n'))
484             fp.close()
485             self._dirty = False
486         if self.addls:
487             # if we have just new entries, let's append them to the fncache
488             tr.addbackup('fncache')
489             fp = self.vfs('fncache', mode='ab', atomictemp=True)
490             if self.addls:
491                 fp.write(encodedir('\n'.join(self.addls) + '\n'))
492             fp.close()
493             self.entries = None
494             self.addls = set()
495
496     def add(self, fn):
497         if self.entries is None:
498             self._load()
499         if fn not in self.entries:
500             self.addls.add(fn)
501
502     def remove(self, fn):
503         if self.entries is None:
504             self._load()
505         if fn in self.addls:
506             self.addls.remove(fn)
507             return
508         try:
509             self.entries.remove(fn)
510             self._dirty = True
511         except KeyError:
512             pass
513
514     def __contains__(self, fn):
515         if fn in self.addls:
516             return True
517         if self.entries is None:
518             self._load()
519         return fn in self.entries
520
521     def __iter__(self):
522         if self.entries is None:
523             self._load()
524         return iter(self.entries | self.addls)
525
526 class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
527     def __init__(self, vfs, fnc, encode):
528         vfsmod.proxyvfs.__init__(self, vfs)
529         self.fncache = fnc
530         self.encode = encode
531
532     def __call__(self, path, mode='r', *args, **kw):
533         encoded = self.encode(path)
534         if mode not in ('r', 'rb') and (path.startswith('data/') or
535                                         path.startswith('meta/')):
536             # do not trigger a fncache load when adding a file that already is
537             # known to exist.
538             notload = self.fncache.entries is None and self.vfs.exists(encoded)
539             if notload and 'a' in mode and not self.vfs.stat(encoded).st_size:
540                 # when appending to an existing file, if the file has size zero,
541                 # it should be considered as missing. Such zero-size files are
542                 # the result of truncation when a transaction is aborted.
543                 notload = False
544             if not notload:
545                 self.fncache.add(path)
546         return self.vfs(encoded, mode, *args, **kw)
547
548     def join(self, path):
549         if path:
550             return self.vfs.join(self.encode(path))
551         else:
552             return self.vfs.join(path)
553
554 class fncachestore(basicstore):
555     def __init__(self, path, vfstype, dotencode):
556         if dotencode:
557             encode = _pathencode
558         else:
559             encode = _plainhybridencode
560         self.encode = encode
561         vfs = vfstype(path + '/store')
562         self.path = vfs.base
563         self.pathsep = self.path + '/'
564         self.createmode = _calcmode(vfs)
565         vfs.createmode = self.createmode
566         self.rawvfs = vfs
567         fnc = fncache(vfs)
568         self.fncache = fnc
569         self.vfs = _fncachevfs(vfs, fnc, encode)
570         self.opener = self.vfs
571
572     def join(self, f):
573         return self.pathsep + self.encode(f)
574
575     def getsize(self, path):
576         return self.rawvfs.stat(path).st_size
577
578     def datafiles(self, matcher=None):
579         for f in sorted(self.fncache):
580             if not _matchtrackedpath(f, matcher):
581                 continue
582             ef = self.encode(f)
583             try:
584                 yield f, ef, self.getsize(ef)
585             except OSError as err:
586                 if err.errno != errno.ENOENT:
587                     raise
588
589     def copylist(self):
590         d = ('narrowspec data meta dh fncache phaseroots obsstore'
591              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
592         return (['requires', '00changelog.i'] +
593                 ['store/' + f for f in d.split()])
594
595     def write(self, tr):
596         self.fncache.write(tr)
597
598     def invalidatecaches(self):
599         self.fncache.entries = None
600         self.fncache.addls = set()
601
602     def markremoved(self, fn):
603         self.fncache.remove(fn)
604
605     def _exists(self, f):
606         ef = self.encode(f)
607         try:
608             self.getsize(ef)
609             return True
610         except OSError as err:
611             if err.errno != errno.ENOENT:
612                 raise
613             # nonexistent entry
614             return False
615
616     def __contains__(self, path):
617         '''Checks if the store contains path'''
618         path = "/".join(("data", path))
619         # check for files (exact match)
620         e = path + '.i'
621         if e in self.fncache and self._exists(e):
622             return True
623         # now check for directories (prefix match)
624         if not path.endswith('/'):
625             path += '/'
626         for e in self.fncache:
627             if e.startswith(path) and self._exists(e):
628                 return True
629         return False