##// END OF EJS Templates
py3: fix the way we produce bytes list in store.py...
Pulkit Goyal -
r30893:a0e3d808 default
parent child Browse files
Show More
@@ -1,577 +1,577 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import hashlib
11 import hashlib
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from . import (
16 from . import (
17 error,
17 error,
18 parsers,
18 parsers,
19 pycompat,
19 pycompat,
20 scmutil,
20 scmutil,
21 util,
21 util,
22 )
22 )
23
23
24 # This avoids a collision between a file named foo and a dir named
24 # This avoids a collision between a file named foo and a dir named
25 # foo.i or foo.d
25 # foo.i or foo.d
26 def _encodedir(path):
26 def _encodedir(path):
27 '''
27 '''
28 >>> _encodedir('data/foo.i')
28 >>> _encodedir('data/foo.i')
29 'data/foo.i'
29 'data/foo.i'
30 >>> _encodedir('data/foo.i/bla.i')
30 >>> _encodedir('data/foo.i/bla.i')
31 'data/foo.i.hg/bla.i'
31 'data/foo.i.hg/bla.i'
32 >>> _encodedir('data/foo.i.hg/bla.i')
32 >>> _encodedir('data/foo.i.hg/bla.i')
33 'data/foo.i.hg.hg/bla.i'
33 'data/foo.i.hg.hg/bla.i'
34 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
34 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
35 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
35 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
36 '''
36 '''
37 return (path
37 return (path
38 .replace(".hg/", ".hg.hg/")
38 .replace(".hg/", ".hg.hg/")
39 .replace(".i/", ".i.hg/")
39 .replace(".i/", ".i.hg/")
40 .replace(".d/", ".d.hg/"))
40 .replace(".d/", ".d.hg/"))
41
41
# prefer the C implementation from parsers when it is available
encodedir = getattr(parsers, 'encodedir', _encodedir)
43
43
def decodedir(path):
    '''Reverse _encodedir: drop the ".hg" suffixes it appended.

    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    # fast path: nothing was encoded in this path
    if ".hg/" not in path:
        return path
    path = path.replace(".d.hg/", ".d/")
    path = path.replace(".i.hg/", ".i/")
    return path.replace(".hg.hg/", ".hg/")
59
59
60 def _reserved():
60 def _reserved():
61 ''' characters that are problematic for filesystems
61 ''' characters that are problematic for filesystems
62
62
63 * ascii escapes (0..31)
63 * ascii escapes (0..31)
64 * ascii hi (126..255)
64 * ascii hi (126..255)
65 * windows specials
65 * windows specials
66
66
67 these characters will be escaped by encodefunctions
67 these characters will be escaped by encodefunctions
68 '''
68 '''
69 winreserved = [ord(x) for x in u'\\:*?"<>|']
69 winreserved = [ord(x) for x in u'\\:*?"<>|']
70 for x in range(32):
70 for x in range(32):
71 yield x
71 yield x
72 for x in range(126, 256):
72 for x in range(126, 256):
73 yield x
73 yield x
74 for x in winreserved:
74 for x in winreserved:
75 yield x
75 yield x
76
76
def _buildencodefun():
    '''Build the (encode, decode) pair used for filename encoding.

    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    if pycompat.ispy3:
        xchr = lambda x: bytes([x])
        # BUG FIX: bytes(a) for an int a builds an a-byte zero-filled
        # buffer, not the single byte chr(a); use bytes([a]) so cmap is
        # keyed on one-byte strings, mirroring xchr above.
        asciistr = [bytes([a]) for a in range(127)]
    else:
        xchr = chr
        asciistr = map(chr, xrange(127))
    capitals = list(range(ord("A"), ord("Z") + 1))

    # identity mapping for plain ascii, then overlay the escapes
    cmap = dict((x, x) for x in asciistr)
    for x in _reserved():
        cmap[xchr(x)] = "~%02x" % x
    for x in capitals + [ord(e)]:
        cmap[xchr(x)] = e + xchr(x).lower()
    # NOTE(review): on py3 the escape values above are still unicode str
    # while the identity entries are bytes — presumably addressed in a
    # follow-up porting change; confirm before relying on the py3 path.

    # invert cmap for decoding; items() behaves identically to
    # iteritems() here on py2 and also exists on py3
    dmap = {}
    for k, v in cmap.items():
        dmap[v] = k
    def decode(s):
        # greedily match 1..3 character escape sequences
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[c] for c in s]),
            lambda s: ''.join(list(decode(s))))
133
133
# module-level encode/decode pair shared by the store implementations
_encodefname, _decodefname = _buildencodefun()
135
135
def encodefilename(s):
    '''
    >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    # directory-collision encoding first, then character escaping
    direncoded = encodedir(s)
    return _encodefname(direncoded)
142
142
def decodefilename(s):
    '''
    >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    # undo character escaping first, then the directory encoding
    unescaped = _decodefname(s)
    return decodedir(unescaped)
149
149
def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f('nothing/special.txt')
    'nothing/special.txt'
    >>> f('HELLO')
    'hello'
    >>> f('hello:world?')
    'hello~3aworld~3f'
    >>> f('the\x07quick\xADshot')
    'the~07quick~adshot'
    '''
    # start from the identity on plain ascii ...
    cmap = {}
    for x in xrange(127):
        cmap[chr(x)] = chr(x)
    # ... escape the reserved characters ...
    for x in _reserved():
        cmap[chr(x)] = "~%02x" % x
    # ... and lowercase the capitals
    for x in range(ord("A"), ord("Z") + 1):
        cmap[chr(x)] = chr(x).lower()
    return lambda s: "".join([cmap[c] for c in s])
168
168
# prefer the C implementation from parsers when it is available
lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun()
170
170
171 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
171 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
172 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
172 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
173 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
173 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
174 def _auxencode(path, dotencode):
174 def _auxencode(path, dotencode):
175 '''
175 '''
176 Encodes filenames containing names reserved by Windows or which end in
176 Encodes filenames containing names reserved by Windows or which end in
177 period or space. Does not touch other single reserved characters c.
177 period or space. Does not touch other single reserved characters c.
178 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
178 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
179 Additionally encodes space or period at the beginning, if dotencode is
179 Additionally encodes space or period at the beginning, if dotencode is
180 True. Parameter path is assumed to be all lowercase.
180 True. Parameter path is assumed to be all lowercase.
181 A segment only needs encoding if a reserved name appears as a
181 A segment only needs encoding if a reserved name appears as a
182 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
182 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
183 doesn't need encoding.
183 doesn't need encoding.
184
184
185 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
185 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
186 >>> _auxencode(s.split('/'), True)
186 >>> _auxencode(s.split('/'), True)
187 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
187 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
188 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
188 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
189 >>> _auxencode(s.split('/'), False)
189 >>> _auxencode(s.split('/'), False)
190 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
190 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
191 >>> _auxencode(['foo. '], True)
191 >>> _auxencode(['foo. '], True)
192 ['foo.~20']
192 ['foo.~20']
193 >>> _auxencode([' .foo'], True)
193 >>> _auxencode([' .foo'], True)
194 ['~20.foo']
194 ['~20.foo']
195 '''
195 '''
196 for i, n in enumerate(path):
196 for i, n in enumerate(path):
197 if not n:
197 if not n:
198 continue
198 continue
199 if dotencode and n[0] in '. ':
199 if dotencode and n[0] in '. ':
200 n = "~%02x" % ord(n[0]) + n[1:]
200 n = "~%02x" % ord(n[0]) + n[1:]
201 path[i] = n
201 path[i] = n
202 else:
202 else:
203 l = n.find('.')
203 l = n.find('.')
204 if l == -1:
204 if l == -1:
205 l = len(n)
205 l = len(n)
206 if ((l == 3 and n[:3] in _winres3) or
206 if ((l == 3 and n[:3] in _winres3) or
207 (l == 4 and n[3] <= '9' and n[3] >= '1'
207 (l == 4 and n[3] <= '9' and n[3] >= '1'
208 and n[:3] in _winres4)):
208 and n[:3] in _winres4)):
209 # encode third letter ('aux' -> 'au~78')
209 # encode third letter ('aux' -> 'au~78')
210 ec = "~%02x" % ord(n[2])
210 ec = "~%02x" % ord(n[2])
211 n = n[0:2] + ec + n[3:]
211 n = n[0:2] + ec + n[3:]
212 path[i] = n
212 path[i] = n
213 if n[-1] in '. ':
213 if n[-1] in '. ':
214 # encode last period or space ('foo...' -> 'foo..~2e')
214 # encode last period or space ('foo...' -> 'foo..~2e')
215 path[i] = n[:-1] + "~%02x" % ord(n[-1])
215 path[i] = n[:-1] + "~%02x" % ord(n[-1])
216 return path
216 return path
217
217
# length limits driving the hashed encoding (see _hashencode)
_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
221
221
def _hashencode(path, dotencode):
    '''Return the non-reversible hashed form of path (see _hybridencode).'''
    digest = hashlib.sha1(path).hexdigest()
    le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/'
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    # keep up to _dirprefixlen chars of each directory level, as long as
    # the joined result stays within _maxshortdirslen
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        t = len(d) if sdirslen == 0 else sdirslen + 1 + len(d)
        if t > _maxshortdirslen:
            break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if dirs:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    # pad with as much of the basename as still fits
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res
252
252
def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    segments = _auxencode(_encodefname(path).split('/'), dotencode)
    res = '/'.join(segments)
    # fall back to the non-reversible hashed form when too long
    if len(res) > _maxstorepathlen:
        return _hashencode(path, dotencode)
    return res
290
290
def _pathencode(path):
    '''Pure-python fallback for parsers.pathencode (dotencode variant).'''
    de = encodedir(path)
    if len(path) <= _maxstorepathlen:
        res = '/'.join(_auxencode(_encodefname(de).split('/'), True))
        if len(res) <= _maxstorepathlen:
            return res
    # input or encoded form too long: use the hashed encoding
    return _hashencode(de, True)
300
300
# prefer the C implementation from parsers when it is available
_pathencode = getattr(parsers, 'pathencode', _pathencode)
302
302
def _plainhybridencode(f):
    '''hybrid encoding without dot-encoding of leading '.'/' ' '''
    return _hybridencode(f, False)
305
305
def _calcmode(vfs):
    '''Return the creation mode for files under vfs, or None for default.'''
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
    except OSError:
        return None
    # avoid some useless chmods
    if (0o777 & ~util.umask) == (0o777 & mode):
        return None
    return mode
316
316
# entries expected at the top of every store
_data = ('data meta 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')
319
319
class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        # rawvfs sees encoded names; vfs transparently applies encodedir
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        results = []
        if self.rawvfs.isdir(path):
            readdir = self.rawvfs.readdir
            pending = [path]
            # iterative depth-first traversal of the store directory
            while pending:
                cur = pending.pop()
                for name, kind, st in readdir(cur, stat=True):
                    full = cur + '/' + name
                    if kind == stat.S_IFREG and name[-2:] in ('.d', '.i'):
                        rel = util.pconvert(full[striplen:])
                        results.append((decodedir(rel), rel, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        pending.append(full)
        results.sort()
        return results

    def datafiles(self):
        return self._walk('data', True) + self._walk('meta', True)

    def topfiles(self):
        # yield manifest before changelog
        return reversed(self._walk('', False))

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for entry in self.datafiles():
            yield entry
        for entry in self.topfiles():
            yield entry

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self, tr):
        pass

    def invalidatecaches(self):
        pass

    def markremoved(self, fn):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if self.vfs.exists(path + ".i"):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return self.vfs.exists(path)
393
393
class encodedstore(basicstore):
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        # names are fully encoded/decoded via encodefilename
        self.vfs = scmutil.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for unencoded, encoded, size in super(encodedstore, self).datafiles():
            try:
                unencoded = decodefilename(unencoded)
            except KeyError:
                # name on disk is not a valid encoding: report it as unknown
                unencoded = None
            yield unencoded, encoded, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])
418
418
class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None      # lazily loaded set of tracked names
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fobj = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fobj.read()).splitlines())
        if '' in self.entries:
            # an empty entry means the file is corrupt; point at the line
            fobj.seek(0)
            for n, line in enumerate(util.iterfile(fobj)):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %d') % (n + 1)
                    raise error.Abort(t)
        fobj.close()

    def write(self, tr):
        if not self._dirty:
            return
        tr.addbackup('fncache')
        fobj = self.vfs('fncache', mode='wb', atomictemp=True)
        if self.entries:
            fobj.write(encodedir('\n'.join(self.entries) + '\n'))
        fobj.close()
        self._dirty = False

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn in self.entries:
            return
        self._dirty = True
        self.entries.add(fn)

    def remove(self, fn):
        if self.entries is None:
            self._load()
        if fn in self.entries:
            self.entries.remove(fn)
            self._dirty = True

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)
479
479
class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs):
    def __init__(self, vfs, fnc, encode):
        scmutil.auditvfs.__init__(self, vfs)
        self.fncache = fnc
        self.encode = encode

    def __call__(self, path, mode='r', *args, **kw):
        # opening a tracked file for writing must register it in the fncache
        writing = mode not in ('r', 'rb')
        if writing and path.startswith(('data/', 'meta/')):
            self.fncache.add(path)
        return self.vfs(self.encode(path), mode, *args, **kw)

    def join(self, path):
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self.encode(path))
497
497
class fncachestore(basicstore):
    def __init__(self, path, vfstype, dotencode):
        # dotencode additionally escapes leading '.'/' ' in components
        self.encode = _pathencode if dotencode else _plainhybridencode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, self.encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
            except OSError as err:
                # stale fncache entries for missing files are tolerated
                if err.errno != errno.ENOENT:
                    raise

    def copylist(self):
        d = ('data meta dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self, tr):
        self.fncache.write(tr)

    def invalidatecaches(self):
        self.fncache.entries = None

    def markremoved(self, fn):
        self.fncache.remove(fn)

    def _exists(self, f):
        try:
            self.getsize(self.encode(f))
            return True
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # nonexistent entry
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # check for files (exact match)
        e = path + '.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        return any(e.startswith(path) and self._exists(e)
                   for e in self.fncache)
571
571
def store(requirements, path, vfstype):
    '''Instantiate the store class matching the repository requirements.'''
    if 'store' not in requirements:
        return basicstore(path, vfstype)
    if 'fncache' in requirements:
        return fncachestore(path, vfstype, 'dotencode' in requirements)
    return encodedstore(path, vfstype)
General Comments 0
You need to be logged in to leave comments. Login now