store: append to fncache if there are only new files to write...
Pulkit Goyal
r40767:0728d87a default
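The patch adds an addls set to the fncache class: add() now records brand-new names there instead of marking the whole cache dirty, and write() appends those names to the existing fncache file (opened with mode 'ab') rather than rewriting every entry; remove(), __contains__, __iter__ and invalidatecaches() are adjusted to take addls into account. The standalone sketch below only illustrates that append-versus-rewrite decision; the plain open()-based I/O and the FncacheSketch name are illustrative stand-ins, not the real vfs and transaction machinery (tr.addbackup, atomictemp files) used in the actual code.

# Simplified, standalone illustration of the append-only fast path added in
# this revision.  Real Mercurial goes through a vfs and a transaction; plain
# open() is used here only to keep the sketch runnable on its own.
class FncacheSketch(object):
    def __init__(self, path):
        self.path = path
        self.entries = None   # names already present in the on-disk file
        self.addls = set()    # names added since the last write (new in this change)
        self._dirty = False   # now set only by removals, no longer by add()

    def _load(self):
        try:
            with open(self.path, 'r') as fp:
                self.entries = set(fp.read().splitlines())
        except IOError:
            # missing file simply means an empty cache
            self.entries = set()

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            # Before this change: self._dirty = True, forcing a full rewrite.
            self.addls.add(fn)

    def remove(self, fn):
        if self.entries is None:
            self._load()
        if fn in self.addls:
            # A name that was never written out can simply be forgotten.
            self.addls.remove(fn)
            return
        if fn in self.entries:
            # Dropping an on-disk entry still forces a full rewrite.
            self.entries.remove(fn)
            self._dirty = True

    def write(self):
        if self._dirty:
            # Something was removed: rewrite the whole file.
            with open(self.path, 'w') as fp:
                if self.entries:
                    fp.write('\n'.join(sorted(self.entries)) + '\n')
            self._dirty = False
        if self.addls:
            # Only new names: append them instead of rewriting everything.
            with open(self.path, 'a') as fp:
                fp.write('\n'.join(sorted(self.addls)) + '\n')
            self.entries = None
            self.addls = set()

With this split, a transaction that only introduces new files never sets _dirty, so write() performs a single append of the new entries instead of rewriting the whole fncache, which is the case the commit message describes.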
@@ -1,611 +1,627 @@
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from . import (
16 from . import (
17 error,
17 error,
18 node,
18 node,
19 policy,
19 policy,
20 pycompat,
20 pycompat,
21 util,
21 util,
22 vfs as vfsmod,
22 vfs as vfsmod,
23 )
23 )
24
24
25 parsers = policy.importmod(r'parsers')
25 parsers = policy.importmod(r'parsers')
26
26
27 def _matchtrackedpath(path, matcher):
27 def _matchtrackedpath(path, matcher):
28 """parses a fncache entry and returns whether the entry is tracking a path
28 """parses a fncache entry and returns whether the entry is tracking a path
29 matched by matcher or not.
29 matched by matcher or not.
30
30
31 If matcher is None, returns True"""
31 If matcher is None, returns True"""
32
32
33 if matcher is None:
33 if matcher is None:
34 return True
34 return True
35 path = decodedir(path)
35 path = decodedir(path)
36 if path.startswith('data/'):
36 if path.startswith('data/'):
37 return matcher(path[len('data/'):-len('.i')])
37 return matcher(path[len('data/'):-len('.i')])
38 elif path.startswith('meta/'):
38 elif path.startswith('meta/'):
39 return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')] or '.')
39 return matcher.visitdir(path[len('meta/'):-len('/00manifest.i')] or '.')
40
40
41 raise error.ProgrammingError("cannot decode path %s" % path)
41 raise error.ProgrammingError("cannot decode path %s" % path)
42
42
43 # This avoids a collision between a file named foo and a dir named
43 # This avoids a collision between a file named foo and a dir named
44 # foo.i or foo.d
44 # foo.i or foo.d
45 def _encodedir(path):
45 def _encodedir(path):
46 '''
46 '''
47 >>> _encodedir(b'data/foo.i')
47 >>> _encodedir(b'data/foo.i')
48 'data/foo.i'
48 'data/foo.i'
49 >>> _encodedir(b'data/foo.i/bla.i')
49 >>> _encodedir(b'data/foo.i/bla.i')
50 'data/foo.i.hg/bla.i'
50 'data/foo.i.hg/bla.i'
51 >>> _encodedir(b'data/foo.i.hg/bla.i')
51 >>> _encodedir(b'data/foo.i.hg/bla.i')
52 'data/foo.i.hg.hg/bla.i'
52 'data/foo.i.hg.hg/bla.i'
53 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
53 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
54 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
54 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
55 '''
55 '''
56 return (path
56 return (path
57 .replace(".hg/", ".hg.hg/")
57 .replace(".hg/", ".hg.hg/")
58 .replace(".i/", ".i.hg/")
58 .replace(".i/", ".i.hg/")
59 .replace(".d/", ".d.hg/"))
59 .replace(".d/", ".d.hg/"))
60
60
61 encodedir = getattr(parsers, 'encodedir', _encodedir)
61 encodedir = getattr(parsers, 'encodedir', _encodedir)
62
62
63 def decodedir(path):
63 def decodedir(path):
64 '''
64 '''
65 >>> decodedir(b'data/foo.i')
65 >>> decodedir(b'data/foo.i')
66 'data/foo.i'
66 'data/foo.i'
67 >>> decodedir(b'data/foo.i.hg/bla.i')
67 >>> decodedir(b'data/foo.i.hg/bla.i')
68 'data/foo.i/bla.i'
68 'data/foo.i/bla.i'
69 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
69 >>> decodedir(b'data/foo.i.hg.hg/bla.i')
70 'data/foo.i.hg/bla.i'
70 'data/foo.i.hg/bla.i'
71 '''
71 '''
72 if ".hg/" not in path:
72 if ".hg/" not in path:
73 return path
73 return path
74 return (path
74 return (path
75 .replace(".d.hg/", ".d/")
75 .replace(".d.hg/", ".d/")
76 .replace(".i.hg/", ".i/")
76 .replace(".i.hg/", ".i/")
77 .replace(".hg.hg/", ".hg/"))
77 .replace(".hg.hg/", ".hg/"))
78
78
79 def _reserved():
79 def _reserved():
80 ''' characters that are problematic for filesystems
80 ''' characters that are problematic for filesystems
81
81
82 * ascii escapes (0..31)
82 * ascii escapes (0..31)
83 * ascii hi (126..255)
83 * ascii hi (126..255)
84 * windows specials
84 * windows specials
85
85
86 these characters will be escaped by encodefunctions
86 these characters will be escaped by encodefunctions
87 '''
87 '''
88 winreserved = [ord(x) for x in u'\\:*?"<>|']
88 winreserved = [ord(x) for x in u'\\:*?"<>|']
89 for x in range(32):
89 for x in range(32):
90 yield x
90 yield x
91 for x in range(126, 256):
91 for x in range(126, 256):
92 yield x
92 yield x
93 for x in winreserved:
93 for x in winreserved:
94 yield x
94 yield x
95
95
96 def _buildencodefun():
96 def _buildencodefun():
97 '''
97 '''
98 >>> enc, dec = _buildencodefun()
98 >>> enc, dec = _buildencodefun()
99
99
100 >>> enc(b'nothing/special.txt')
100 >>> enc(b'nothing/special.txt')
101 'nothing/special.txt'
101 'nothing/special.txt'
102 >>> dec(b'nothing/special.txt')
102 >>> dec(b'nothing/special.txt')
103 'nothing/special.txt'
103 'nothing/special.txt'
104
104
105 >>> enc(b'HELLO')
105 >>> enc(b'HELLO')
106 '_h_e_l_l_o'
106 '_h_e_l_l_o'
107 >>> dec(b'_h_e_l_l_o')
107 >>> dec(b'_h_e_l_l_o')
108 'HELLO'
108 'HELLO'
109
109
110 >>> enc(b'hello:world?')
110 >>> enc(b'hello:world?')
111 'hello~3aworld~3f'
111 'hello~3aworld~3f'
112 >>> dec(b'hello~3aworld~3f')
112 >>> dec(b'hello~3aworld~3f')
113 'hello:world?'
113 'hello:world?'
114
114
115 >>> enc(b'the\\x07quick\\xADshot')
115 >>> enc(b'the\\x07quick\\xADshot')
116 'the~07quick~adshot'
116 'the~07quick~adshot'
117 >>> dec(b'the~07quick~adshot')
117 >>> dec(b'the~07quick~adshot')
118 'the\\x07quick\\xadshot'
118 'the\\x07quick\\xadshot'
119 '''
119 '''
120 e = '_'
120 e = '_'
121 xchr = pycompat.bytechr
121 xchr = pycompat.bytechr
122 asciistr = list(map(xchr, range(127)))
122 asciistr = list(map(xchr, range(127)))
123 capitals = list(range(ord("A"), ord("Z") + 1))
123 capitals = list(range(ord("A"), ord("Z") + 1))
124
124
125 cmap = dict((x, x) for x in asciistr)
125 cmap = dict((x, x) for x in asciistr)
126 for x in _reserved():
126 for x in _reserved():
127 cmap[xchr(x)] = "~%02x" % x
127 cmap[xchr(x)] = "~%02x" % x
128 for x in capitals + [ord(e)]:
128 for x in capitals + [ord(e)]:
129 cmap[xchr(x)] = e + xchr(x).lower()
129 cmap[xchr(x)] = e + xchr(x).lower()
130
130
131 dmap = {}
131 dmap = {}
132 for k, v in cmap.iteritems():
132 for k, v in cmap.iteritems():
133 dmap[v] = k
133 dmap[v] = k
134 def decode(s):
134 def decode(s):
135 i = 0
135 i = 0
136 while i < len(s):
136 while i < len(s):
137 for l in pycompat.xrange(1, 4):
137 for l in pycompat.xrange(1, 4):
138 try:
138 try:
139 yield dmap[s[i:i + l]]
139 yield dmap[s[i:i + l]]
140 i += l
140 i += l
141 break
141 break
142 except KeyError:
142 except KeyError:
143 pass
143 pass
144 else:
144 else:
145 raise KeyError
145 raise KeyError
146 return (lambda s: ''.join([cmap[s[c:c + 1]]
146 return (lambda s: ''.join([cmap[s[c:c + 1]]
147 for c in pycompat.xrange(len(s))]),
147 for c in pycompat.xrange(len(s))]),
148 lambda s: ''.join(list(decode(s))))
148 lambda s: ''.join(list(decode(s))))
149
149
150 _encodefname, _decodefname = _buildencodefun()
150 _encodefname, _decodefname = _buildencodefun()
151
151
152 def encodefilename(s):
152 def encodefilename(s):
153 '''
153 '''
154 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
154 >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
155 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
155 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
156 '''
156 '''
157 return _encodefname(encodedir(s))
157 return _encodefname(encodedir(s))
158
158
159 def decodefilename(s):
159 def decodefilename(s):
160 '''
160 '''
161 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
161 >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
162 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
162 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
163 '''
163 '''
164 return decodedir(_decodefname(s))
164 return decodedir(_decodefname(s))
165
165
166 def _buildlowerencodefun():
166 def _buildlowerencodefun():
167 '''
167 '''
168 >>> f = _buildlowerencodefun()
168 >>> f = _buildlowerencodefun()
169 >>> f(b'nothing/special.txt')
169 >>> f(b'nothing/special.txt')
170 'nothing/special.txt'
170 'nothing/special.txt'
171 >>> f(b'HELLO')
171 >>> f(b'HELLO')
172 'hello'
172 'hello'
173 >>> f(b'hello:world?')
173 >>> f(b'hello:world?')
174 'hello~3aworld~3f'
174 'hello~3aworld~3f'
175 >>> f(b'the\\x07quick\\xADshot')
175 >>> f(b'the\\x07quick\\xADshot')
176 'the~07quick~adshot'
176 'the~07quick~adshot'
177 '''
177 '''
178 xchr = pycompat.bytechr
178 xchr = pycompat.bytechr
179 cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
179 cmap = dict([(xchr(x), xchr(x)) for x in pycompat.xrange(127)])
180 for x in _reserved():
180 for x in _reserved():
181 cmap[xchr(x)] = "~%02x" % x
181 cmap[xchr(x)] = "~%02x" % x
182 for x in range(ord("A"), ord("Z") + 1):
182 for x in range(ord("A"), ord("Z") + 1):
183 cmap[xchr(x)] = xchr(x).lower()
183 cmap[xchr(x)] = xchr(x).lower()
184 def lowerencode(s):
184 def lowerencode(s):
185 return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
185 return "".join([cmap[c] for c in pycompat.iterbytestr(s)])
186 return lowerencode
186 return lowerencode
187
187
188 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
188 lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
189
189
190 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
190 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
191 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
191 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
192 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
192 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
193 def _auxencode(path, dotencode):
193 def _auxencode(path, dotencode):
194 '''
194 '''
195 Encodes filenames containing names reserved by Windows or which end in
195 Encodes filenames containing names reserved by Windows or which end in
196 period or space. Does not touch other single reserved characters c.
196 period or space. Does not touch other single reserved characters c.
197 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
197 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
198 Additionally encodes space or period at the beginning, if dotencode is
198 Additionally encodes space or period at the beginning, if dotencode is
199 True. Parameter path is assumed to be all lowercase.
199 True. Parameter path is assumed to be all lowercase.
200 A segment only needs encoding if a reserved name appears as a
200 A segment only needs encoding if a reserved name appears as a
201 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
201 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
202 doesn't need encoding.
202 doesn't need encoding.
203
203
204 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
204 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
205 >>> _auxencode(s.split(b'/'), True)
205 >>> _auxencode(s.split(b'/'), True)
206 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
206 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
207 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
207 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
208 >>> _auxencode(s.split(b'/'), False)
208 >>> _auxencode(s.split(b'/'), False)
209 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
209 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
210 >>> _auxencode([b'foo. '], True)
210 >>> _auxencode([b'foo. '], True)
211 ['foo.~20']
211 ['foo.~20']
212 >>> _auxencode([b' .foo'], True)
212 >>> _auxencode([b' .foo'], True)
213 ['~20.foo']
213 ['~20.foo']
214 '''
214 '''
215 for i, n in enumerate(path):
215 for i, n in enumerate(path):
216 if not n:
216 if not n:
217 continue
217 continue
218 if dotencode and n[0] in '. ':
218 if dotencode and n[0] in '. ':
219 n = "~%02x" % ord(n[0:1]) + n[1:]
219 n = "~%02x" % ord(n[0:1]) + n[1:]
220 path[i] = n
220 path[i] = n
221 else:
221 else:
222 l = n.find('.')
222 l = n.find('.')
223 if l == -1:
223 if l == -1:
224 l = len(n)
224 l = len(n)
225 if ((l == 3 and n[:3] in _winres3) or
225 if ((l == 3 and n[:3] in _winres3) or
226 (l == 4 and n[3:4] <= '9' and n[3:4] >= '1'
226 (l == 4 and n[3:4] <= '9' and n[3:4] >= '1'
227 and n[:3] in _winres4)):
227 and n[:3] in _winres4)):
228 # encode third letter ('aux' -> 'au~78')
228 # encode third letter ('aux' -> 'au~78')
229 ec = "~%02x" % ord(n[2:3])
229 ec = "~%02x" % ord(n[2:3])
230 n = n[0:2] + ec + n[3:]
230 n = n[0:2] + ec + n[3:]
231 path[i] = n
231 path[i] = n
232 if n[-1] in '. ':
232 if n[-1] in '. ':
233 # encode last period or space ('foo...' -> 'foo..~2e')
233 # encode last period or space ('foo...' -> 'foo..~2e')
234 path[i] = n[:-1] + "~%02x" % ord(n[-1:])
234 path[i] = n[:-1] + "~%02x" % ord(n[-1:])
235 return path
235 return path
236
236
237 _maxstorepathlen = 120
237 _maxstorepathlen = 120
238 _dirprefixlen = 8
238 _dirprefixlen = 8
239 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
239 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
240
240
241 def _hashencode(path, dotencode):
241 def _hashencode(path, dotencode):
242 digest = node.hex(hashlib.sha1(path).digest())
242 digest = node.hex(hashlib.sha1(path).digest())
243 le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
243 le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
244 parts = _auxencode(le, dotencode)
244 parts = _auxencode(le, dotencode)
245 basename = parts[-1]
245 basename = parts[-1]
246 _root, ext = os.path.splitext(basename)
246 _root, ext = os.path.splitext(basename)
247 sdirs = []
247 sdirs = []
248 sdirslen = 0
248 sdirslen = 0
249 for p in parts[:-1]:
249 for p in parts[:-1]:
250 d = p[:_dirprefixlen]
250 d = p[:_dirprefixlen]
251 if d[-1] in '. ':
251 if d[-1] in '. ':
252 # Windows can't access dirs ending in period or space
252 # Windows can't access dirs ending in period or space
253 d = d[:-1] + '_'
253 d = d[:-1] + '_'
254 if sdirslen == 0:
254 if sdirslen == 0:
255 t = len(d)
255 t = len(d)
256 else:
256 else:
257 t = sdirslen + 1 + len(d)
257 t = sdirslen + 1 + len(d)
258 if t > _maxshortdirslen:
258 if t > _maxshortdirslen:
259 break
259 break
260 sdirs.append(d)
260 sdirs.append(d)
261 sdirslen = t
261 sdirslen = t
262 dirs = '/'.join(sdirs)
262 dirs = '/'.join(sdirs)
263 if len(dirs) > 0:
263 if len(dirs) > 0:
264 dirs += '/'
264 dirs += '/'
265 res = 'dh/' + dirs + digest + ext
265 res = 'dh/' + dirs + digest + ext
266 spaceleft = _maxstorepathlen - len(res)
266 spaceleft = _maxstorepathlen - len(res)
267 if spaceleft > 0:
267 if spaceleft > 0:
268 filler = basename[:spaceleft]
268 filler = basename[:spaceleft]
269 res = 'dh/' + dirs + filler + digest + ext
269 res = 'dh/' + dirs + filler + digest + ext
270 return res
270 return res
271
271
272 def _hybridencode(path, dotencode):
272 def _hybridencode(path, dotencode):
273 '''encodes path with a length limit
273 '''encodes path with a length limit
274
274
275 Encodes all paths that begin with 'data/', according to the following.
275 Encodes all paths that begin with 'data/', according to the following.
276
276
277 Default encoding (reversible):
277 Default encoding (reversible):
278
278
279 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
279 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
280 characters are encoded as '~xx', where xx is the two digit hex code
280 characters are encoded as '~xx', where xx is the two digit hex code
281 of the character (see encodefilename).
281 of the character (see encodefilename).
282 Relevant path components consisting of Windows reserved filenames are
282 Relevant path components consisting of Windows reserved filenames are
283 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
283 masked by encoding the third character ('aux' -> 'au~78', see _auxencode).
284
284
285 Hashed encoding (not reversible):
285 Hashed encoding (not reversible):
286
286
287 If the default-encoded path is longer than _maxstorepathlen, a
287 If the default-encoded path is longer than _maxstorepathlen, a
288 non-reversible hybrid hashing of the path is done instead.
288 non-reversible hybrid hashing of the path is done instead.
289 This encoding uses up to _dirprefixlen characters of all directory
289 This encoding uses up to _dirprefixlen characters of all directory
290 levels of the lowerencoded path, but not more levels than can fit into
290 levels of the lowerencoded path, but not more levels than can fit into
291 _maxshortdirslen.
291 _maxshortdirslen.
292 Then follows the filler followed by the sha digest of the full path.
292 Then follows the filler followed by the sha digest of the full path.
293 The filler is the beginning of the basename of the lowerencoded path
293 The filler is the beginning of the basename of the lowerencoded path
294 (the basename is everything after the last path separator). The filler
294 (the basename is everything after the last path separator). The filler
295 is as long as possible, filling in characters from the basename until
295 is as long as possible, filling in characters from the basename until
296 the encoded path has _maxstorepathlen characters (or all chars of the
296 the encoded path has _maxstorepathlen characters (or all chars of the
297 basename have been taken).
297 basename have been taken).
298 The extension (e.g. '.i' or '.d') is preserved.
298 The extension (e.g. '.i' or '.d') is preserved.
299
299
300 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
300 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
301 encoding was used.
301 encoding was used.
302 '''
302 '''
303 path = encodedir(path)
303 path = encodedir(path)
304 ef = _encodefname(path).split('/')
304 ef = _encodefname(path).split('/')
305 res = '/'.join(_auxencode(ef, dotencode))
305 res = '/'.join(_auxencode(ef, dotencode))
306 if len(res) > _maxstorepathlen:
306 if len(res) > _maxstorepathlen:
307 res = _hashencode(path, dotencode)
307 res = _hashencode(path, dotencode)
308 return res
308 return res
309
309
310 def _pathencode(path):
310 def _pathencode(path):
311 de = encodedir(path)
311 de = encodedir(path)
312 if len(path) > _maxstorepathlen:
312 if len(path) > _maxstorepathlen:
313 return _hashencode(de, True)
313 return _hashencode(de, True)
314 ef = _encodefname(de).split('/')
314 ef = _encodefname(de).split('/')
315 res = '/'.join(_auxencode(ef, True))
315 res = '/'.join(_auxencode(ef, True))
316 if len(res) > _maxstorepathlen:
316 if len(res) > _maxstorepathlen:
317 return _hashencode(de, True)
317 return _hashencode(de, True)
318 return res
318 return res
319
319
320 _pathencode = getattr(parsers, 'pathencode', _pathencode)
320 _pathencode = getattr(parsers, 'pathencode', _pathencode)
321
321
322 def _plainhybridencode(f):
322 def _plainhybridencode(f):
323 return _hybridencode(f, False)
323 return _hybridencode(f, False)
324
324
325 def _calcmode(vfs):
325 def _calcmode(vfs):
326 try:
326 try:
327 # files in .hg/ will be created using this mode
327 # files in .hg/ will be created using this mode
328 mode = vfs.stat().st_mode
328 mode = vfs.stat().st_mode
329 # avoid some useless chmods
329 # avoid some useless chmods
330 if (0o777 & ~util.umask) == (0o777 & mode):
330 if (0o777 & ~util.umask) == (0o777 & mode):
331 mode = None
331 mode = None
332 except OSError:
332 except OSError:
333 mode = None
333 mode = None
334 return mode
334 return mode
335
335
336 _data = ('narrowspec data meta 00manifest.d 00manifest.i'
336 _data = ('narrowspec data meta 00manifest.d 00manifest.i'
337 ' 00changelog.d 00changelog.i phaseroots obsstore')
337 ' 00changelog.d 00changelog.i phaseroots obsstore')
338
338
339 def isrevlog(f, kind, st):
339 def isrevlog(f, kind, st):
340 return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')
340 return kind == stat.S_IFREG and f[-2:] in ('.i', '.d')
341
341
342 class basicstore(object):
342 class basicstore(object):
343 '''base class for local repository stores'''
343 '''base class for local repository stores'''
344 def __init__(self, path, vfstype):
344 def __init__(self, path, vfstype):
345 vfs = vfstype(path)
345 vfs = vfstype(path)
346 self.path = vfs.base
346 self.path = vfs.base
347 self.createmode = _calcmode(vfs)
347 self.createmode = _calcmode(vfs)
348 vfs.createmode = self.createmode
348 vfs.createmode = self.createmode
349 self.rawvfs = vfs
349 self.rawvfs = vfs
350 self.vfs = vfsmod.filtervfs(vfs, encodedir)
350 self.vfs = vfsmod.filtervfs(vfs, encodedir)
351 self.opener = self.vfs
351 self.opener = self.vfs
352
352
353 def join(self, f):
353 def join(self, f):
354 return self.path + '/' + encodedir(f)
354 return self.path + '/' + encodedir(f)
355
355
356 def _walk(self, relpath, recurse, filefilter=isrevlog):
356 def _walk(self, relpath, recurse, filefilter=isrevlog):
357 '''yields (unencoded, encoded, size)'''
357 '''yields (unencoded, encoded, size)'''
358 path = self.path
358 path = self.path
359 if relpath:
359 if relpath:
360 path += '/' + relpath
360 path += '/' + relpath
361 striplen = len(self.path) + 1
361 striplen = len(self.path) + 1
362 l = []
362 l = []
363 if self.rawvfs.isdir(path):
363 if self.rawvfs.isdir(path):
364 visit = [path]
364 visit = [path]
365 readdir = self.rawvfs.readdir
365 readdir = self.rawvfs.readdir
366 while visit:
366 while visit:
367 p = visit.pop()
367 p = visit.pop()
368 for f, kind, st in readdir(p, stat=True):
368 for f, kind, st in readdir(p, stat=True):
369 fp = p + '/' + f
369 fp = p + '/' + f
370 if filefilter(f, kind, st):
370 if filefilter(f, kind, st):
371 n = util.pconvert(fp[striplen:])
371 n = util.pconvert(fp[striplen:])
372 l.append((decodedir(n), n, st.st_size))
372 l.append((decodedir(n), n, st.st_size))
373 elif kind == stat.S_IFDIR and recurse:
373 elif kind == stat.S_IFDIR and recurse:
374 visit.append(fp)
374 visit.append(fp)
375 l.sort()
375 l.sort()
376 return l
376 return l
377
377
378 def datafiles(self, matcher=None):
378 def datafiles(self, matcher=None):
379 return self._walk('data', True) + self._walk('meta', True)
379 return self._walk('data', True) + self._walk('meta', True)
380
380
381 def topfiles(self):
381 def topfiles(self):
382 # yield manifest before changelog
382 # yield manifest before changelog
383 return reversed(self._walk('', False))
383 return reversed(self._walk('', False))
384
384
385 def walk(self, matcher=None):
385 def walk(self, matcher=None):
386 '''yields (unencoded, encoded, size)
386 '''yields (unencoded, encoded, size)
387
387
388 if a matcher is passed, storage files of only those tracked paths
388 if a matcher is passed, storage files of only those tracked paths
389 are passed with matches the matcher
389 are passed with matches the matcher
390 '''
390 '''
391 # yield data files first
391 # yield data files first
392 for x in self.datafiles(matcher):
392 for x in self.datafiles(matcher):
393 yield x
393 yield x
394 for x in self.topfiles():
394 for x in self.topfiles():
395 yield x
395 yield x
396
396
397 def copylist(self):
397 def copylist(self):
398 return ['requires'] + _data.split()
398 return ['requires'] + _data.split()
399
399
400 def write(self, tr):
400 def write(self, tr):
401 pass
401 pass
402
402
403 def invalidatecaches(self):
403 def invalidatecaches(self):
404 pass
404 pass
405
405
406 def markremoved(self, fn):
406 def markremoved(self, fn):
407 pass
407 pass
408
408
409 def __contains__(self, path):
409 def __contains__(self, path):
410 '''Checks if the store contains path'''
410 '''Checks if the store contains path'''
411 path = "/".join(("data", path))
411 path = "/".join(("data", path))
412 # file?
412 # file?
413 if self.vfs.exists(path + ".i"):
413 if self.vfs.exists(path + ".i"):
414 return True
414 return True
415 # dir?
415 # dir?
416 if not path.endswith("/"):
416 if not path.endswith("/"):
417 path = path + "/"
417 path = path + "/"
418 return self.vfs.exists(path)
418 return self.vfs.exists(path)
419
419
420 class encodedstore(basicstore):
420 class encodedstore(basicstore):
421 def __init__(self, path, vfstype):
421 def __init__(self, path, vfstype):
422 vfs = vfstype(path + '/store')
422 vfs = vfstype(path + '/store')
423 self.path = vfs.base
423 self.path = vfs.base
424 self.createmode = _calcmode(vfs)
424 self.createmode = _calcmode(vfs)
425 vfs.createmode = self.createmode
425 vfs.createmode = self.createmode
426 self.rawvfs = vfs
426 self.rawvfs = vfs
427 self.vfs = vfsmod.filtervfs(vfs, encodefilename)
427 self.vfs = vfsmod.filtervfs(vfs, encodefilename)
428 self.opener = self.vfs
428 self.opener = self.vfs
429
429
430 def datafiles(self, matcher=None):
430 def datafiles(self, matcher=None):
431 for a, b, size in super(encodedstore, self).datafiles():
431 for a, b, size in super(encodedstore, self).datafiles():
432 try:
432 try:
433 a = decodefilename(a)
433 a = decodefilename(a)
434 except KeyError:
434 except KeyError:
435 a = None
435 a = None
436 if a is not None and not _matchtrackedpath(a, matcher):
436 if a is not None and not _matchtrackedpath(a, matcher):
437 continue
437 continue
438 yield a, b, size
438 yield a, b, size
439
439
440 def join(self, f):
440 def join(self, f):
441 return self.path + '/' + encodefilename(f)
441 return self.path + '/' + encodefilename(f)
442
442
443 def copylist(self):
443 def copylist(self):
444 return (['requires', '00changelog.i'] +
444 return (['requires', '00changelog.i'] +
445 ['store/' + f for f in _data.split()])
445 ['store/' + f for f in _data.split()])
446
446
447 class fncache(object):
447 class fncache(object):
448 # the filename used to be partially encoded
448 # the filename used to be partially encoded
449 # hence the encodedir/decodedir dance
449 # hence the encodedir/decodedir dance
450 def __init__(self, vfs):
450 def __init__(self, vfs):
451 self.vfs = vfs
451 self.vfs = vfs
452 self.entries = None
452 self.entries = None
453 self._dirty = False
453 self._dirty = False
454 # set of new additions to fncache
455 self.addls = set()
454
456
455 def _load(self):
457 def _load(self):
456 '''fill the entries from the fncache file'''
458 '''fill the entries from the fncache file'''
457 self._dirty = False
459 self._dirty = False
458 try:
460 try:
459 fp = self.vfs('fncache', mode='rb')
461 fp = self.vfs('fncache', mode='rb')
460 except IOError:
462 except IOError:
461 # skip nonexistent file
463 # skip nonexistent file
462 self.entries = set()
464 self.entries = set()
463 return
465 return
464 self.entries = set(decodedir(fp.read()).splitlines())
466 self.entries = set(decodedir(fp.read()).splitlines())
465 if '' in self.entries:
467 if '' in self.entries:
466 fp.seek(0)
468 fp.seek(0)
467 for n, line in enumerate(util.iterfile(fp)):
469 for n, line in enumerate(util.iterfile(fp)):
468 if not line.rstrip('\n'):
470 if not line.rstrip('\n'):
469 t = _('invalid entry in fncache, line %d') % (n + 1)
471 t = _('invalid entry in fncache, line %d') % (n + 1)
470 raise error.Abort(t)
472 raise error.Abort(t)
471 fp.close()
473 fp.close()
472
474
473 def write(self, tr):
475 def write(self, tr):
474 if self._dirty:
476 if self._dirty:
475 assert self.entries is not None
477 assert self.entries is not None
476 tr.addbackup('fncache')
478 tr.addbackup('fncache')
477 fp = self.vfs('fncache', mode='wb', atomictemp=True)
479 fp = self.vfs('fncache', mode='wb', atomictemp=True)
478 if self.entries:
480 if self.entries:
479 fp.write(encodedir('\n'.join(self.entries) + '\n'))
481 fp.write(encodedir('\n'.join(self.entries) + '\n'))
480 fp.close()
482 fp.close()
481 self._dirty = False
483 self._dirty = False
484 if self.addls:
485 # if we have just new entries, let's append them to the fncache
486 tr.addbackup('fncache')
487 fp = self.vfs('fncache', mode='ab', atomictemp=True)
488 if self.addls:
489 fp.write(encodedir('\n'.join(self.addls) + '\n'))
490 fp.close()
491 self.entries = None
492 self.addls = set()
482
493
483 def add(self, fn):
494 def add(self, fn):
484 if self.entries is None:
495 if self.entries is None:
485 self._load()
496 self._load()
486 if fn not in self.entries:
497 if fn not in self.entries:
487 self._dirty = True
498 self.addls.add(fn)
488 self.entries.add(fn)
489
499
490 def remove(self, fn):
500 def remove(self, fn):
491 if self.entries is None:
501 if self.entries is None:
492 self._load()
502 self._load()
503 if fn in self.addls:
504 self.addls.remove(fn)
505 return
493 try:
506 try:
494 self.entries.remove(fn)
507 self.entries.remove(fn)
495 self._dirty = True
508 self._dirty = True
496 except KeyError:
509 except KeyError:
497 pass
510 pass
498
511
499 def __contains__(self, fn):
512 def __contains__(self, fn):
513 if fn in self.addls:
514 return True
500 if self.entries is None:
515 if self.entries is None:
501 self._load()
516 self._load()
502 return fn in self.entries
517 return fn in self.entries
503
518
504 def __iter__(self):
519 def __iter__(self):
505 if self.entries is None:
520 if self.entries is None:
506 self._load()
521 self._load()
507 return iter(self.entries)
522 return iter(self.entries | self.addls)
508
523
509 class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
524 class _fncachevfs(vfsmod.abstractvfs, vfsmod.proxyvfs):
510 def __init__(self, vfs, fnc, encode):
525 def __init__(self, vfs, fnc, encode):
511 vfsmod.proxyvfs.__init__(self, vfs)
526 vfsmod.proxyvfs.__init__(self, vfs)
512 self.fncache = fnc
527 self.fncache = fnc
513 self.encode = encode
528 self.encode = encode
514
529
515 def __call__(self, path, mode='r', *args, **kw):
530 def __call__(self, path, mode='r', *args, **kw):
516 encoded = self.encode(path)
531 encoded = self.encode(path)
517 if mode not in ('r', 'rb') and (path.startswith('data/') or
532 if mode not in ('r', 'rb') and (path.startswith('data/') or
518 path.startswith('meta/')):
533 path.startswith('meta/')):
519 # do not trigger a fncache load when adding a file that already is
534 # do not trigger a fncache load when adding a file that already is
520 # known to exist.
535 # known to exist.
521 notload = self.fncache.entries is None and self.vfs.exists(encoded)
536 notload = self.fncache.entries is None and self.vfs.exists(encoded)
522 if notload and 'a' in mode and not self.vfs.stat(encoded).st_size:
537 if notload and 'a' in mode and not self.vfs.stat(encoded).st_size:
523 # when appending to an existing file, if the file has size zero,
538 # when appending to an existing file, if the file has size zero,
524 # it should be considered as missing. Such zero-size files are
539 # it should be considered as missing. Such zero-size files are
525 # the result of truncation when a transaction is aborted.
540 # the result of truncation when a transaction is aborted.
526 notload = False
541 notload = False
527 if not notload:
542 if not notload:
528 self.fncache.add(path)
543 self.fncache.add(path)
529 return self.vfs(encoded, mode, *args, **kw)
544 return self.vfs(encoded, mode, *args, **kw)
530
545
531 def join(self, path):
546 def join(self, path):
532 if path:
547 if path:
533 return self.vfs.join(self.encode(path))
548 return self.vfs.join(self.encode(path))
534 else:
549 else:
535 return self.vfs.join(path)
550 return self.vfs.join(path)
536
551
537 class fncachestore(basicstore):
552 class fncachestore(basicstore):
538 def __init__(self, path, vfstype, dotencode):
553 def __init__(self, path, vfstype, dotencode):
539 if dotencode:
554 if dotencode:
540 encode = _pathencode
555 encode = _pathencode
541 else:
556 else:
542 encode = _plainhybridencode
557 encode = _plainhybridencode
543 self.encode = encode
558 self.encode = encode
544 vfs = vfstype(path + '/store')
559 vfs = vfstype(path + '/store')
545 self.path = vfs.base
560 self.path = vfs.base
546 self.pathsep = self.path + '/'
561 self.pathsep = self.path + '/'
547 self.createmode = _calcmode(vfs)
562 self.createmode = _calcmode(vfs)
548 vfs.createmode = self.createmode
563 vfs.createmode = self.createmode
549 self.rawvfs = vfs
564 self.rawvfs = vfs
550 fnc = fncache(vfs)
565 fnc = fncache(vfs)
551 self.fncache = fnc
566 self.fncache = fnc
552 self.vfs = _fncachevfs(vfs, fnc, encode)
567 self.vfs = _fncachevfs(vfs, fnc, encode)
553 self.opener = self.vfs
568 self.opener = self.vfs
554
569
555 def join(self, f):
570 def join(self, f):
556 return self.pathsep + self.encode(f)
571 return self.pathsep + self.encode(f)
557
572
558 def getsize(self, path):
573 def getsize(self, path):
559 return self.rawvfs.stat(path).st_size
574 return self.rawvfs.stat(path).st_size
560
575
561 def datafiles(self, matcher=None):
576 def datafiles(self, matcher=None):
562 for f in sorted(self.fncache):
577 for f in sorted(self.fncache):
563 if not _matchtrackedpath(f, matcher):
578 if not _matchtrackedpath(f, matcher):
564 continue
579 continue
565 ef = self.encode(f)
580 ef = self.encode(f)
566 try:
581 try:
567 yield f, ef, self.getsize(ef)
582 yield f, ef, self.getsize(ef)
568 except OSError as err:
583 except OSError as err:
569 if err.errno != errno.ENOENT:
584 if err.errno != errno.ENOENT:
570 raise
585 raise
571
586
572 def copylist(self):
587 def copylist(self):
573 d = ('narrowspec data meta dh fncache phaseroots obsstore'
588 d = ('narrowspec data meta dh fncache phaseroots obsstore'
574 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
589 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
575 return (['requires', '00changelog.i'] +
590 return (['requires', '00changelog.i'] +
576 ['store/' + f for f in d.split()])
591 ['store/' + f for f in d.split()])
577
592
578 def write(self, tr):
593 def write(self, tr):
579 self.fncache.write(tr)
594 self.fncache.write(tr)
580
595
581 def invalidatecaches(self):
596 def invalidatecaches(self):
582 self.fncache.entries = None
597 self.fncache.entries = None
598 self.fncache.addls = set()
583
599
584 def markremoved(self, fn):
600 def markremoved(self, fn):
585 self.fncache.remove(fn)
601 self.fncache.remove(fn)
586
602
587 def _exists(self, f):
603 def _exists(self, f):
588 ef = self.encode(f)
604 ef = self.encode(f)
589 try:
605 try:
590 self.getsize(ef)
606 self.getsize(ef)
591 return True
607 return True
592 except OSError as err:
608 except OSError as err:
593 if err.errno != errno.ENOENT:
609 if err.errno != errno.ENOENT:
594 raise
610 raise
595 # nonexistent entry
611 # nonexistent entry
596 return False
612 return False
597
613
598 def __contains__(self, path):
614 def __contains__(self, path):
599 '''Checks if the store contains path'''
615 '''Checks if the store contains path'''
600 path = "/".join(("data", path))
616 path = "/".join(("data", path))
601 # check for files (exact match)
617 # check for files (exact match)
602 e = path + '.i'
618 e = path + '.i'
603 if e in self.fncache and self._exists(e):
619 if e in self.fncache and self._exists(e):
604 return True
620 return True
605 # now check for directories (prefix match)
621 # now check for directories (prefix match)
606 if not path.endswith('/'):
622 if not path.endswith('/'):
607 path += '/'
623 path += '/'
608 for e in self.fncache:
624 for e in self.fncache:
609 if e.startswith(path) and self._exists(e):
625 if e.startswith(path) and self._exists(e):
610 return True
626 return True
611 return False
627 return False