##// END OF EJS Templates
py3: fix for Python 3.12 emitting SyntaxWarning on invalid escape sequences...
Mads Kiilerich -
r51245:4be9ecc9 stable
parent child Browse files
Show More
@@ -1,848 +1,848 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
3 # Copyright 2008 Olivia Mackall <olivia@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8
8
9 import functools
9 import functools
10 import os
10 import os
11 import re
11 import re
12 import stat
12 import stat
13
13
14 from .i18n import _
14 from .i18n import _
15 from .pycompat import getattr
15 from .pycompat import getattr
16 from .node import hex
16 from .node import hex
17 from . import (
17 from . import (
18 changelog,
18 changelog,
19 error,
19 error,
20 manifest,
20 manifest,
21 policy,
21 policy,
22 pycompat,
22 pycompat,
23 util,
23 util,
24 vfs as vfsmod,
24 vfs as vfsmod,
25 )
25 )
26 from .utils import hashutil
26 from .utils import hashutil
27
27
28 parsers = policy.importmod('parsers')
28 parsers = policy.importmod('parsers')
29 # how much bytes should be read from fncache in one read
29 # how much bytes should be read from fncache in one read
30 # It is done to prevent loading large fncache files into memory
30 # It is done to prevent loading large fncache files into memory
31 fncache_chunksize = 10 ** 6
31 fncache_chunksize = 10 ** 6
32
32
33
33
34 def _matchtrackedpath(path, matcher):
34 def _matchtrackedpath(path, matcher):
35 """parses a fncache entry and returns whether the entry is tracking a path
35 """parses a fncache entry and returns whether the entry is tracking a path
36 matched by matcher or not.
36 matched by matcher or not.
37
37
38 If matcher is None, returns True"""
38 If matcher is None, returns True"""
39
39
40 if matcher is None:
40 if matcher is None:
41 return True
41 return True
42 path = decodedir(path)
42 path = decodedir(path)
43 if path.startswith(b'data/'):
43 if path.startswith(b'data/'):
44 return matcher(path[len(b'data/') : -len(b'.i')])
44 return matcher(path[len(b'data/') : -len(b'.i')])
45 elif path.startswith(b'meta/'):
45 elif path.startswith(b'meta/'):
46 return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
46 return matcher.visitdir(path[len(b'meta/') : -len(b'/00manifest.i')])
47
47
48 raise error.ProgrammingError(b"cannot decode path %s" % path)
48 raise error.ProgrammingError(b"cannot decode path %s" % path)
49
49
50
50
51 # This avoids a collision between a file named foo and a dir named
51 # This avoids a collision between a file named foo and a dir named
52 # foo.i or foo.d
52 # foo.i or foo.d
53 def _encodedir(path):
53 def _encodedir(path):
54 """
54 """
55 >>> _encodedir(b'data/foo.i')
55 >>> _encodedir(b'data/foo.i')
56 'data/foo.i'
56 'data/foo.i'
57 >>> _encodedir(b'data/foo.i/bla.i')
57 >>> _encodedir(b'data/foo.i/bla.i')
58 'data/foo.i.hg/bla.i'
58 'data/foo.i.hg/bla.i'
59 >>> _encodedir(b'data/foo.i.hg/bla.i')
59 >>> _encodedir(b'data/foo.i.hg/bla.i')
60 'data/foo.i.hg.hg/bla.i'
60 'data/foo.i.hg.hg/bla.i'
61 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
61 >>> _encodedir(b'data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
62 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
62 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
63 """
63 """
64 return (
64 return (
65 path.replace(b".hg/", b".hg.hg/")
65 path.replace(b".hg/", b".hg.hg/")
66 .replace(b".i/", b".i.hg/")
66 .replace(b".i/", b".i.hg/")
67 .replace(b".d/", b".d.hg/")
67 .replace(b".d/", b".d.hg/")
68 )
68 )
69
69
70
70
# prefer the C implementation from parsers when available
encodedir = getattr(parsers, 'encodedir', _encodedir)
72
72
73
73
def decodedir(path):
    """
    >>> decodedir(b'data/foo.i')
    'data/foo.i'
    >>> decodedir(b'data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir(b'data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    """
    # fast path: nothing to undo if no '.hg/' component is present
    if b".hg/" not in path:
        return path
    # exact inverse of _encodedir (note the reversed replacement order)
    return (
        path.replace(b".d.hg/", b".d/")
        .replace(b".i.hg/", b".i/")
        .replace(b".hg.hg/", b".hg/")
    )
90
90
91
91
92 def _reserved():
92 def _reserved():
93 """characters that are problematic for filesystems
93 """characters that are problematic for filesystems
94
94
95 * ascii escapes (0..31)
95 * ascii escapes (0..31)
96 * ascii hi (126..255)
96 * ascii hi (126..255)
97 * windows specials
97 * windows specials
98
98
99 these characters will be escaped by encodefunctions
99 these characters will be escaped by encodefunctions
100 """
100 """
101 winreserved = [ord(x) for x in u'\\:*?"<>|']
101 winreserved = [ord(x) for x in u'\\:*?"<>|']
102 for x in range(32):
102 for x in range(32):
103 yield x
103 yield x
104 for x in range(126, 256):
104 for x in range(126, 256):
105 yield x
105 yield x
106 for x in winreserved:
106 for x in winreserved:
107 yield x
107 yield x
108
108
109
109
def _buildencodefun():
    """
    >>> enc, dec = _buildencodefun()

    >>> enc(b'nothing/special.txt')
    'nothing/special.txt'
    >>> dec(b'nothing/special.txt')
    'nothing/special.txt'

    >>> enc(b'HELLO')
    '_h_e_l_l_o'
    >>> dec(b'_h_e_l_l_o')
    'HELLO'

    >>> enc(b'hello:world?')
    'hello~3aworld~3f'
    >>> dec(b'hello~3aworld~3f')
    'hello:world?'

    >>> enc(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    >>> dec(b'the~07quick~adshot')
    'the\\x07quick\\xadshot'
    """
    e = b'_'
    xchr = pycompat.bytechr
    asciistr = list(map(xchr, range(127)))
    capitals = list(range(ord(b"A"), ord(b"Z") + 1))

    # cmap maps each single byte to its (possibly multi-byte) encoding:
    # reserved bytes become '~xx', capitals and '_' become '_' + lowercase
    cmap = {x: x for x in asciistr}
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    for x in capitals + [ord(e)]:
        cmap[xchr(x)] = e + xchr(x).lower()

    # dmap is the exact inverse, keyed by 1-3 byte encoded sequences
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k

    def decode(s):
        i = 0
        while i < len(s):
            # encoded sequences are 1, 2 ('_x') or 3 ('~xx') bytes long
            for l in range(1, 4):
                try:
                    yield dmap[s[i : i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError

    return (
        lambda s: b''.join([cmap[s[c : c + 1]] for c in range(len(s))]),
        lambda s: b''.join(list(decode(s))),
    )
166
166
167
167
_encodefname, _decodefname = _buildencodefun()
169
169
170
170
def encodefilename(s):
    """
    >>> encodefilename(b'foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    """
    # directory-suffix protection first, then per-byte encoding
    return _encodefname(encodedir(s))
177
177
178
178
def decodefilename(s):
    """
    >>> decodefilename(b'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    """
    # inverse of encodefilename: undo per-byte encoding, then dir suffixes
    return decodedir(_decodefname(s))
185
185
186
186
def _buildlowerencodefun():
    """
    >>> f = _buildlowerencodefun()
    >>> f(b'nothing/special.txt')
    'nothing/special.txt'
    >>> f(b'HELLO')
    'hello'
    >>> f(b'hello:world?')
    'hello~3aworld~3f'
    >>> f(b'the\\x07quick\\xADshot')
    'the~07quick~adshot'
    """
    xchr = pycompat.bytechr
    cmap = {xchr(x): xchr(x) for x in range(127)}
    for x in _reserved():
        cmap[xchr(x)] = b"~%02x" % x
    # unlike _buildencodefun, capitals are simply lowercased: this encoding
    # is NOT reversible (used by the hashed long-path scheme)
    for x in range(ord(b"A"), ord(b"Z") + 1):
        cmap[xchr(x)] = xchr(x).lower()

    def lowerencode(s):
        return b"".join([cmap[c] for c in pycompat.iterbytestr(s)])

    return lowerencode
210
210
211
211
# prefer the C implementation from parsers when available
lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
213
213
214 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
214 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
215 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
215 _winres3 = (b'aux', b'con', b'prn', b'nul') # length 3
216 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
216 _winres4 = (b'com', b'lpt') # length 4 (with trailing 1..9)
217
217
218
218
219 def _auxencode(path, dotencode):
219 def _auxencode(path, dotencode):
220 """
220 """
221 Encodes filenames containing names reserved by Windows or which end in
221 Encodes filenames containing names reserved by Windows or which end in
222 period or space. Does not touch other single reserved characters c.
222 period or space. Does not touch other single reserved characters c.
223 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
223 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
224 Additionally encodes space or period at the beginning, if dotencode is
224 Additionally encodes space or period at the beginning, if dotencode is
225 True. Parameter path is assumed to be all lowercase.
225 True. Parameter path is assumed to be all lowercase.
226 A segment only needs encoding if a reserved name appears as a
226 A segment only needs encoding if a reserved name appears as a
227 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
227 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
228 doesn't need encoding.
228 doesn't need encoding.
229
229
230 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
230 >>> s = b'.foo/aux.txt/txt.aux/con/prn/nul/foo.'
231 >>> _auxencode(s.split(b'/'), True)
231 >>> _auxencode(s.split(b'/'), True)
232 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
232 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
233 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
233 >>> s = b'.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
234 >>> _auxencode(s.split(b'/'), False)
234 >>> _auxencode(s.split(b'/'), False)
235 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
235 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
236 >>> _auxencode([b'foo. '], True)
236 >>> _auxencode([b'foo. '], True)
237 ['foo.~20']
237 ['foo.~20']
238 >>> _auxencode([b' .foo'], True)
238 >>> _auxencode([b' .foo'], True)
239 ['~20.foo']
239 ['~20.foo']
240 """
240 """
241 for i, n in enumerate(path):
241 for i, n in enumerate(path):
242 if not n:
242 if not n:
243 continue
243 continue
244 if dotencode and n[0] in b'. ':
244 if dotencode and n[0] in b'. ':
245 n = b"~%02x" % ord(n[0:1]) + n[1:]
245 n = b"~%02x" % ord(n[0:1]) + n[1:]
246 path[i] = n
246 path[i] = n
247 else:
247 else:
248 l = n.find(b'.')
248 l = n.find(b'.')
249 if l == -1:
249 if l == -1:
250 l = len(n)
250 l = len(n)
251 if (l == 3 and n[:3] in _winres3) or (
251 if (l == 3 and n[:3] in _winres3) or (
252 l == 4
252 l == 4
253 and n[3:4] <= b'9'
253 and n[3:4] <= b'9'
254 and n[3:4] >= b'1'
254 and n[3:4] >= b'1'
255 and n[:3] in _winres4
255 and n[:3] in _winres4
256 ):
256 ):
257 # encode third letter ('aux' -> 'au~78')
257 # encode third letter ('aux' -> 'au~78')
258 ec = b"~%02x" % ord(n[2:3])
258 ec = b"~%02x" % ord(n[2:3])
259 n = n[0:2] + ec + n[3:]
259 n = n[0:2] + ec + n[3:]
260 path[i] = n
260 path[i] = n
261 if n[-1] in b'. ':
261 if n[-1] in b'. ':
262 # encode last period or space ('foo...' -> 'foo..~2e')
262 # encode last period or space ('foo...' -> 'foo..~2e')
263 path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
263 path[i] = n[:-1] + b"~%02x" % ord(n[-1:])
264 return path
264 return path
265
265
266
266
267 _maxstorepathlen = 120
267 _maxstorepathlen = 120
268 _dirprefixlen = 8
268 _dirprefixlen = 8
269 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
269 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
270
270
271
271
def _hashencode(path, dotencode):
    """hash-based, non-reversible encoding for over-long store paths

    Produces 'dh/' + shortened directory prefixes + basename filler +
    sha1 digest + original extension, capped at _maxstorepathlen.
    """
    digest = hex(hashutil.sha1(path).digest())
    le = lowerencode(path[5:]).split(b'/')  # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    # keep up to _dirprefixlen chars of each directory level, but stop
    # once the joined prefix would exceed _maxshortdirslen
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in b'. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + b'_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
        if t > _maxshortdirslen:
            break
        sdirs.append(d)
        sdirslen = t
    dirs = b'/'.join(sdirs)
    if len(dirs) > 0:
        dirs += b'/'
    res = b'dh/' + dirs + digest + ext
    # fill remaining room with the start of the basename to keep paths
    # somewhat recognizable
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = b'dh/' + dirs + filler + digest + ext
    return res
302
302
303
303
def _hybridencode(path, dotencode):
    """encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    """
    path = encodedir(path)
    ef = _encodefname(path).split(b'/')
    res = b'/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        res = _hashencode(path, dotencode)
    return res
341
341
342
342
def _pathencode(path):
    """like _hybridencode with dotencode=True, with an extra early length
    check so clearly-too-long paths skip the reversible encoding attempt"""
    de = encodedir(path)
    if len(path) > _maxstorepathlen:
        return _hashencode(de, True)
    ef = _encodefname(de).split(b'/')
    res = b'/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return _hashencode(de, True)
    return res
352
352
353
353
# prefer the C implementation from parsers when available
_pathencode = getattr(parsers, 'pathencode', _pathencode)
355
355
356
356
def _plainhybridencode(f):
    """hybrid encoding without leading dot/space encoding"""
    return _hybridencode(f, False)
359
359
360
360
def _calcmode(vfs):
    """return the file creation mode to use for this store, or None

    None means the current umask already yields the right permissions,
    so no explicit chmod is needed."""
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0o777 & ~util.umask) == (0o777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode
371
371
372
372
373 _data = [
373 _data = [
374 b'bookmarks',
374 b'bookmarks',
375 b'narrowspec',
375 b'narrowspec',
376 b'data',
376 b'data',
377 b'meta',
377 b'meta',
378 b'00manifest.d',
378 b'00manifest.d',
379 b'00manifest.i',
379 b'00manifest.i',
380 b'00changelog.d',
380 b'00changelog.d',
381 b'00changelog.i',
381 b'00changelog.i',
382 b'phaseroots',
382 b'phaseroots',
383 b'obsstore',
383 b'obsstore',
384 b'requires',
384 b'requires',
385 ]
385 ]
386
386
REVLOG_FILES_MAIN_EXT = (b'.i', b'i.tmpcensored')
REVLOG_FILES_OTHER_EXT = (
    b'.idx',
    b'.d',
    b'.dat',
    b'.n',
    b'.nd',
    b'.sda',
    b'd.tmpcensored',
)
# files that are "volatile" and might change between listing and streaming
#
# note: the ".nd" file are nodemap data and won't "change" but they might be
# deleted.
REVLOG_FILES_VOLATILE_EXT = (b'.n', b'.nd')

# some exception to the above matching
#
# XXX This is currently not in use because of issue6542
# raw bytes literal: a plain b'...' here triggers a SyntaxWarning on
# Python 3.12 for the invalid escape sequence '\.'
EXCLUDED = re.compile(br'.*undo\.[^/]+\.(nd?|i)$')
407
407
408
408
def is_revlog(f, kind, st):
    """return the revlog file-type flags for directory entry f, or None

    Non-regular files (directories, symlinks, ...) are never revlogs."""
    if kind != stat.S_IFREG:
        return None
    return revlog_type(f)
413
413
414
414
def revlog_type(f):
    """classify filename f by extension; return file-type flags or None"""
    # XXX we need to filter `undo.` created by the transaction here, however
    # being naive about it also filter revlog for `undo.*` files, leading to
    # issue6542. So we no longer use EXCLUDED.
    if f.endswith(REVLOG_FILES_MAIN_EXT):
        return FILEFLAGS_REVLOG_MAIN
    elif f.endswith(REVLOG_FILES_OTHER_EXT):
        t = FILETYPE_FILELOG_OTHER
        if f.endswith(REVLOG_FILES_VOLATILE_EXT):
            t |= FILEFLAGS_VOLATILE
        return t
    return None
427
427
428
428
# the file is part of changelog data
FILEFLAGS_CHANGELOG = 1 << 13
# the file is part of manifest data
FILEFLAGS_MANIFESTLOG = 1 << 12
# the file is part of filelog data
FILEFLAGS_FILELOG = 1 << 11
# file that are not directly part of a revlog
FILEFLAGS_OTHER = 1 << 10

# the main entry point for a revlog
FILEFLAGS_REVLOG_MAIN = 1 << 1
# a secondary file for a revlog
FILEFLAGS_REVLOG_OTHER = 1 << 0

# files that are "volatile" and might change between listing and streaming
FILEFLAGS_VOLATILE = 1 << 20

# convenience combinations of the category and role bits above
FILETYPE_CHANGELOG_MAIN = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_MAIN
FILETYPE_CHANGELOG_OTHER = FILEFLAGS_CHANGELOG | FILEFLAGS_REVLOG_OTHER
FILETYPE_MANIFESTLOG_MAIN = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_MAIN
FILETYPE_MANIFESTLOG_OTHER = FILEFLAGS_MANIFESTLOG | FILEFLAGS_REVLOG_OTHER
FILETYPE_FILELOG_MAIN = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_MAIN
FILETYPE_FILELOG_OTHER = FILEFLAGS_FILELOG | FILEFLAGS_REVLOG_OTHER
FILETYPE_OTHER = FILEFLAGS_OTHER
453
453
454
454
class basicstore:
    '''base class for local repository stores'''

    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        # the plain store only protects against dir/file name collisions
        # (encodedir); it applies no per-byte filename encoding
        self.vfs = vfsmod.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        """return the filesystem path for store-relative path f"""
        return self.path + b'/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (revlog_type, unencoded, size)'''
        path = self.path
        if relpath:
            path += b'/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + b'/' + f
                    rl_type = is_revlog(f, kind, st)
                    if rl_type is not None:
                        n = util.pconvert(fp[striplen:])
                        l.append((rl_type, decodedir(n), st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def changelog(self, trypending, concurrencychecker=None):
        """instantiate the repository changelog on this store's vfs"""
        return changelog.changelog(
            self.vfs,
            trypending=trypending,
            concurrencychecker=concurrencychecker,
        )

    def manifestlog(self, repo, storenarrowmatch):
        """instantiate the repository manifestlog on this store's vfs"""
        rootstore = manifest.manifestrevlog(repo.nodeconstants, self.vfs)
        return manifest.manifestlog(self.vfs, repo, rootstore, storenarrowmatch)

    def datafiles(self, matcher=None, undecodable=None):
        """Like walk, but excluding the changelog and root manifest.

        When [undecodable] is None, revlogs names that can't be
        decoded cause an exception. When it is provided, it should
        be a list and the filenames that can't be decoded are added
        to it instead. This is very rarely needed."""
        files = self._walk(b'data', True) + self._walk(b'meta', True)
        for (t, u, s) in files:
            yield (FILEFLAGS_FILELOG | t, u, s)

    def topfiles(self):
        """yield store files living outside of data/ and meta/"""
        # yield manifest before changelog
        files = reversed(self._walk(b'', False))
        for (t, u, s) in files:
            if u.startswith(b'00changelog'):
                yield (FILEFLAGS_CHANGELOG | t, u, s)
            elif u.startswith(b'00manifest'):
                yield (FILEFLAGS_MANIFESTLOG | t, u, s)
            else:
                yield (FILETYPE_OTHER | t, u, s)

    def walk(self, matcher=None):
        """return file related to data storage (ie: revlogs)

        yields (file_type, unencoded, size)

        if a matcher is passed, storage files of only those tracked paths
        are passed with matches the matcher
        """
        # yield data files first
        for x in self.datafiles(matcher):
            yield x
        for x in self.topfiles():
            yield x

    def copylist(self):
        """store entries to copy verbatim (e.g. for clone)"""
        return _data

    def write(self, tr):
        # nothing to flush for the basic store; subclasses may override
        pass

    def invalidatecaches(self):
        # no caches in the basic store; subclasses may override
        pass

    def markremoved(self, fn):
        # no bookkeeping needed here; subclasses may override
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = b"/".join((b"data", path))
        # file?
        if self.vfs.exists(path + b".i"):
            return True
        # dir?
        if not path.endswith(b"/"):
            path = path + b"/"
        return self.vfs.exists(path)
562
562
563
563
class encodedstore(basicstore):
    """A store whose ``data/`` filenames are stored filesystem-encoded."""

    def __init__(self, path, vfstype):
        raw = vfstype(path + b'/store')
        self.path = raw.base
        self.createmode = _calcmode(raw)
        raw.createmode = self.createmode
        self.rawvfs = raw
        # all reads/writes through self.vfs get their paths encoded
        self.vfs = vfsmod.filtervfs(raw, encodefilename)
        self.opener = self.vfs

    # note: topfiles would also need a decode phase. It is just that in
    # practice we do not have any file outside of `data/` that needs encoding.
    # However that might change so we should probably add a test and encoding
    # decoding for it too. see issue6548

    def datafiles(self, matcher=None, undecodable=None):
        """Yield ``(type, decoded name, size)`` for every data file.

        Names that cannot be decoded are appended to ``undecodable`` when
        that list is provided; otherwise they raise a StorageError.
        """
        for typ, encoded_name, size in super(encodedstore, self).datafiles():
            try:
                decoded_name = decodefilename(encoded_name)
            except KeyError:
                if undecodable is None:
                    raise error.StorageError(
                        _(b'undecodable revlog name %s') % encoded_name
                    )
                undecodable.append(encoded_name)
                continue
            if _matchtrackedpath(decoded_name, matcher):
                yield typ, decoded_name, size

    def join(self, f):
        """Return the filesystem path of *f* inside the store."""
        return b'/'.join((self.path, encodefilename(f)))

    def copylist(self):
        """Return the list of store paths to copy when cloning."""
        return [b'requires', b'00changelog.i'] + [b'store/' + f for f in _data]
599
599
600
600
class fncache:
    """In-memory mirror of the on-disk ``fncache`` file.

    The fncache lists every tracked storage path so the store can
    enumerate its data files without walking the filesystem.
    """

    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        # paths whose addition via add() should be silently skipped
        self._ignores = set()
        # entries read from disk; None until _load() has run
        self.entries = None
        # True once an entry was removed, forcing a full rewrite on write()
        self._dirty = False
        # set of new additions to fncache
        self.addls = set()

    def ensureloaded(self, warn=None):
        """read the fncache file if not already read.

        If the file on disk is corrupted, raise. If warn is provided,
        warn and keep going instead."""
        if self.entries is None:
            self._load(warn)

    def _load(self, warn=None):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs(b'fncache', mode=b'rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return

        self.entries = set()
        # read in bounded chunks so a huge fncache is never held in memory
        # at once; each chunk is split on the last newline and the remainder
        # carried over into the next iteration
        chunk = b''
        for c in iter(functools.partial(fp.read, fncache_chunksize), b''):
            chunk += c
            try:
                p = chunk.rindex(b'\n')
                self.entries.update(decodedir(chunk[: p + 1]).splitlines())
                chunk = chunk[p + 1 :]
            except ValueError:
                # substring '\n' not found, maybe the entry is bigger than the
                # chunksize, so let's keep iterating
                pass

        # leftover bytes mean the file did not end with a newline: corruption
        if chunk:
            msg = _(b"fncache does not ends with a newline")
            if warn:
                warn(msg + b'\n')
            else:
                raise error.Abort(
                    msg,
                    hint=_(
                        b"use 'hg debugrebuildfncache' to "
                        b"rebuild the fncache"
                    ),
                )
        self._checkentries(fp, warn)
        fp.close()

    def _checkentries(self, fp, warn):
        """make sure there is no empty string in entries"""
        if b'' in self.entries:
            # re-scan the file to report the exact offending line number
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip(b'\n'):
                    t = _(b'invalid entry in fncache, line %d') % (n + 1)
                    if warn:
                        warn(t + b'\n')
                    else:
                        raise error.Abort(t)

    def write(self, tr):
        """Flush pending changes to disk within transaction *tr*.

        A dirty cache (removals happened) is rewritten in full; otherwise
        new entries are simply appended to the existing file.
        """
        if self._dirty:
            assert self.entries is not None
            # fold pending additions in before the full rewrite
            self.entries = self.entries | self.addls
            self.addls = set()
            tr.addbackup(b'fncache')
            fp = self.vfs(b'fncache', mode=b'wb', atomictemp=True)
            if self.entries:
                fp.write(encodedir(b'\n'.join(self.entries) + b'\n'))
            fp.close()
            self._dirty = False
        if self.addls:
            # if we have just new entries, let's append them to the fncache
            tr.addbackup(b'fncache')
            fp = self.vfs(b'fncache', mode=b'ab', atomictemp=True)
            if self.addls:
                fp.write(encodedir(b'\n'.join(self.addls) + b'\n'))
            fp.close()
            # entries are reset so the next read reloads the merged file
            self.entries = None
            self.addls = set()

    def addignore(self, fn):
        # mark *fn* so that later add() calls for it are ignored
        self._ignores.add(fn)

    def add(self, fn):
        # record *fn* as pending addition unless ignored or already known
        if fn in self._ignores:
            return
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self.addls.add(fn)

    def remove(self, fn):
        # drop *fn*; removing a persisted entry marks the cache dirty
        if self.entries is None:
            self._load()
        if fn in self.addls:
            self.addls.remove(fn)
            return
        try:
            self.entries.remove(fn)
            self._dirty = True
        except KeyError:
            pass

    def __contains__(self, fn):
        # pending additions are checked first to avoid loading the file
        if fn in self.addls:
            return True
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        # iterate over persisted and pending entries alike
        if self.entries is None:
            self._load()
        return iter(self.entries | self.addls)
725
725
726
726
class _fncachevfs(vfsmod.proxyvfs):
    """vfs proxy that records data/meta files opened for writing in the
    fncache, encoding paths on the way through."""

    def __init__(self, vfs, fnc, encode):
        vfsmod.proxyvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode=b'r', *args, **kw):
        encoded = self.encode(path)
        writing = mode not in (b'r', b'rb')
        tracked = path.startswith(b'data/') or path.startswith(b'meta/')
        if writing and tracked:
            # do not trigger a fncache load when adding a file that already is
            # known to exist.
            skip_add = self.fncache.entries is None and self.vfs.exists(encoded)
            if skip_add and b'r+' in mode and not self.vfs.stat(encoded).st_size:
                # when appending to an existing file, if the file has size zero,
                # it should be considered as missing. Such zero-size files are
                # the result of truncation when a transaction is aborted.
                skip_add = False
            if not skip_add:
                self.fncache.add(path)
        return self.vfs(encoded, mode, *args, **kw)

    def join(self, path):
        # the empty path is passed through unencoded
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self.encode(path))

    def register_file(self, path):
        """generic hook point to lets fncache steer its stew"""
        for prefix in (b'data/', b'meta/'):
            if path.startswith(prefix):
                self.fncache.add(path)
                break
760
760
761
761
class fncachestore(basicstore):
    """Store flavour that tracks its data files through an fncache."""

    def __init__(self, path, vfstype, dotencode):
        # dotencode additionally encodes leading dots/spaces in path parts
        self.encode = _pathencode if dotencode else _plainhybridencode
        vfs = vfstype(path + b'/store')
        self.path = vfs.base
        self.pathsep = self.path + b'/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, self.encode)
        self.opener = self.vfs

    def join(self, f):
        """Return the filesystem path of *f* inside the store."""
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        """Return the on-disk size in bytes of the (encoded) *path*."""
        return self.rawvfs.stat(path).st_size

    def datafiles(self, matcher=None, undecodable=None):
        """Yield ``(type, name, size)`` for each tracked data file.

        Entries listed in the fncache but missing on disk are skipped.
        """
        for fn in sorted(self.fncache):
            if not _matchtrackedpath(fn, matcher):
                continue
            encoded = self.encode(fn)
            try:
                t = revlog_type(fn)
                assert t is not None, fn
                yield t | FILEFLAGS_FILELOG, fn, self.getsize(encoded)
            except FileNotFoundError:
                pass

    def copylist(self):
        """Return the list of store paths to copy when cloning."""
        store_entries = [
            b'bookmarks',
            b'narrowspec',
            b'data',
            b'meta',
            b'dh',
            b'fncache',
            b'phaseroots',
            b'obsstore',
            b'00manifest.d',
            b'00manifest.i',
            b'00changelog.d',
            b'00changelog.i',
            b'requires',
        ]
        return [b'requires', b'00changelog.i'] + [
            b'store/' + f for f in store_entries
        ]

    def write(self, tr):
        # flush pending fncache changes within transaction *tr*
        self.fncache.write(tr)

    def invalidatecaches(self):
        # force the fncache to be reloaded from disk on next access
        self.fncache.entries = None
        self.fncache.addls = set()

    def markremoved(self, fn):
        # drop *fn* from the fncache
        self.fncache.remove(fn)

    def _exists(self, f):
        # True when the encoded form of *f* exists on disk
        try:
            self.getsize(self.encode(f))
        except FileNotFoundError:
            return False
        return True

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = b"/".join((b"data", path))
        # check for files (exact match): a revlog index next to the path
        if path + b'.i' in self.fncache and self._exists(path + b'.i'):
            return True
        # now check for directories (prefix match)
        if not path.endswith(b'/'):
            path += b'/'
        return any(
            e.startswith(path) and self._exists(e) for e in self.fncache
        )
General Comments 0
You need to be logged in to leave comments. Login now