store: change names to comply with project coding standards...
Adrian Buehlmann
r14288:00a0ab08 default
@@ -1,427 +1,427 @@
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, scmutil, util
9 import osutil, scmutil, util
10 import os, stat
10 import os, stat
11
11
12 _sha = util.sha1
12 _sha = util.sha1
13
13
14 # This avoids a collision between a file named foo and a dir named
14 # This avoids a collision between a file named foo and a dir named
15 # foo.i or foo.d
15 # foo.i or foo.d
16 def encodedir(path):
16 def encodedir(path):
17 '''
17 '''
18 >>> encodedir('data/foo.i')
18 >>> encodedir('data/foo.i')
19 'data/foo.i'
19 'data/foo.i'
20 >>> encodedir('data/foo.i/bla.i')
20 >>> encodedir('data/foo.i/bla.i')
21 'data/foo.i.hg/bla.i'
21 'data/foo.i.hg/bla.i'
22 >>> encodedir('data/foo.i.hg/bla.i')
22 >>> encodedir('data/foo.i.hg/bla.i')
23 'data/foo.i.hg.hg/bla.i'
23 'data/foo.i.hg.hg/bla.i'
24 '''
24 '''
25 if not path.startswith('data/'):
25 if not path.startswith('data/'):
26 return path
26 return path
27 return (path
27 return (path
28 .replace(".hg/", ".hg.hg/")
28 .replace(".hg/", ".hg.hg/")
29 .replace(".i/", ".i.hg/")
29 .replace(".i/", ".i.hg/")
30 .replace(".d/", ".d.hg/"))
30 .replace(".d/", ".d.hg/"))
31
31
32 def decodedir(path):
32 def decodedir(path):
33 '''
33 '''
34 >>> decodedir('data/foo.i')
34 >>> decodedir('data/foo.i')
35 'data/foo.i'
35 'data/foo.i'
36 >>> decodedir('data/foo.i.hg/bla.i')
36 >>> decodedir('data/foo.i.hg/bla.i')
37 'data/foo.i/bla.i'
37 'data/foo.i/bla.i'
38 >>> decodedir('data/foo.i.hg.hg/bla.i')
38 >>> decodedir('data/foo.i.hg.hg/bla.i')
39 'data/foo.i.hg/bla.i'
39 'data/foo.i.hg/bla.i'
40 '''
40 '''
41 if not path.startswith('data/') or ".hg/" not in path:
41 if not path.startswith('data/') or ".hg/" not in path:
42 return path
42 return path
43 return (path
43 return (path
44 .replace(".d.hg/", ".d/")
44 .replace(".d.hg/", ".d/")
45 .replace(".i.hg/", ".i/")
45 .replace(".i.hg/", ".i/")
46 .replace(".hg.hg/", ".hg/"))
46 .replace(".hg.hg/", ".hg/"))
47
47
48 def _buildencodefun():
48 def _buildencodefun():
49 '''
49 '''
50 >>> enc, dec = _buildencodefun()
50 >>> enc, dec = _buildencodefun()
51
51
52 >>> enc('nothing/special.txt')
52 >>> enc('nothing/special.txt')
53 'nothing/special.txt'
53 'nothing/special.txt'
54 >>> dec('nothing/special.txt')
54 >>> dec('nothing/special.txt')
55 'nothing/special.txt'
55 'nothing/special.txt'
56
56
57 >>> enc('HELLO')
57 >>> enc('HELLO')
58 '_h_e_l_l_o'
58 '_h_e_l_l_o'
59 >>> dec('_h_e_l_l_o')
59 >>> dec('_h_e_l_l_o')
60 'HELLO'
60 'HELLO'
61
61
62 >>> enc('hello:world?')
62 >>> enc('hello:world?')
63 'hello~3aworld~3f'
63 'hello~3aworld~3f'
64 >>> dec('hello~3aworld~3f')
64 >>> dec('hello~3aworld~3f')
65 'hello:world?'
65 'hello:world?'
66
66
67 >>> enc('the\x07quick\xADshot')
67 >>> enc('the\x07quick\xADshot')
68 'the~07quick~adshot'
68 'the~07quick~adshot'
69 >>> dec('the~07quick~adshot')
69 >>> dec('the~07quick~adshot')
70 'the\\x07quick\\xadshot'
70 'the\\x07quick\\xadshot'
71 '''
71 '''
72 e = '_'
72 e = '_'
73 win_reserved = [ord(x) for x in '\\:*?"<>|']
73 winreserved = [ord(x) for x in '\\:*?"<>|']
74 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
74 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
75 for x in (range(32) + range(126, 256) + win_reserved):
75 for x in (range(32) + range(126, 256) + winreserved):
76 cmap[chr(x)] = "~%02x" % x
76 cmap[chr(x)] = "~%02x" % x
77 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
77 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
78 cmap[chr(x)] = e + chr(x).lower()
78 cmap[chr(x)] = e + chr(x).lower()
79 dmap = {}
79 dmap = {}
80 for k, v in cmap.iteritems():
80 for k, v in cmap.iteritems():
81 dmap[v] = k
81 dmap[v] = k
82 def decode(s):
82 def decode(s):
83 i = 0
83 i = 0
84 while i < len(s):
84 while i < len(s):
85 for l in xrange(1, 4):
85 for l in xrange(1, 4):
86 try:
86 try:
87 yield dmap[s[i:i + l]]
87 yield dmap[s[i:i + l]]
88 i += l
88 i += l
89 break
89 break
90 except KeyError:
90 except KeyError:
91 pass
91 pass
92 else:
92 else:
93 raise KeyError
93 raise KeyError
94 return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
94 return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
95 lambda s: decodedir("".join(list(decode(s)))))
95 lambda s: decodedir("".join(list(decode(s)))))
96
96
97 encodefilename, decodefilename = _buildencodefun()
97 encodefilename, decodefilename = _buildencodefun()
98
98
99 def _build_lower_encodefun():
99 def _buildlowerencodefun():
100 '''
100 '''
101 >>> f = _build_lower_encodefun()
101 >>> f = _buildlowerencodefun()
102 >>> f('nothing/special.txt')
102 >>> f('nothing/special.txt')
103 'nothing/special.txt'
103 'nothing/special.txt'
104 >>> f('HELLO')
104 >>> f('HELLO')
105 'hello'
105 'hello'
106 >>> f('hello:world?')
106 >>> f('hello:world?')
107 'hello~3aworld~3f'
107 'hello~3aworld~3f'
108 >>> f('the\x07quick\xADshot')
108 >>> f('the\x07quick\xADshot')
109 'the~07quick~adshot'
109 'the~07quick~adshot'
110 '''
110 '''
111 win_reserved = [ord(x) for x in '\\:*?"<>|']
111 winreserved = [ord(x) for x in '\\:*?"<>|']
112 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
112 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
113 for x in (range(32) + range(126, 256) + win_reserved):
113 for x in (range(32) + range(126, 256) + winreserved):
114 cmap[chr(x)] = "~%02x" % x
114 cmap[chr(x)] = "~%02x" % x
115 for x in range(ord("A"), ord("Z")+1):
115 for x in range(ord("A"), ord("Z")+1):
116 cmap[chr(x)] = chr(x).lower()
116 cmap[chr(x)] = chr(x).lower()
117 return lambda s: "".join([cmap[c] for c in s])
117 return lambda s: "".join([cmap[c] for c in s])
118
118
119 lowerencode = _build_lower_encodefun()
119 lowerencode = _buildlowerencodefun()
120
120
121 _windows_reserved_filenames = '''con prn aux nul
121 _winreservednames = '''con prn aux nul
122 com1 com2 com3 com4 com5 com6 com7 com8 com9
122 com1 com2 com3 com4 com5 com6 com7 com8 com9
123 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
123 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
124 def _auxencode(path, dotencode):
124 def _auxencode(path, dotencode):
125 '''
125 '''
126 Encodes filenames containing names reserved by Windows or which end in
126 Encodes filenames containing names reserved by Windows or which end in
127 period or space. Does not touch other single reserved characters c.
127 period or space. Does not touch other single reserved characters c.
128 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
128 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
129 Additionally encodes space or period at the beginning, if dotencode is
129 Additionally encodes space or period at the beginning, if dotencode is
130 True.
130 True.
131 path is assumed to be all lowercase.
131 path is assumed to be all lowercase.
132
132
133 >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
133 >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
134 '~2efoo/au~78.txt/txt.aux/co~6e/pr~6e/nu~6c/foo~2e'
134 '~2efoo/au~78.txt/txt.aux/co~6e/pr~6e/nu~6c/foo~2e'
135 >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/foo.', False)
135 >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/foo.', False)
136 '.com1com2/lp~749.lpt4.lpt1/conprn/foo~2e'
136 '.com1com2/lp~749.lpt4.lpt1/conprn/foo~2e'
137 >>> _auxencode('foo. ', True)
137 >>> _auxencode('foo. ', True)
138 'foo.~20'
138 'foo.~20'
139 >>> _auxencode(' .foo', True)
139 >>> _auxencode(' .foo', True)
140 '~20.foo'
140 '~20.foo'
141 '''
141 '''
142 res = []
142 res = []
143 for n in path.split('/'):
143 for n in path.split('/'):
144 if n:
144 if n:
145 base = n.split('.')[0]
145 base = n.split('.')[0]
146 if base and (base in _windows_reserved_filenames):
146 if base and (base in _winreservednames):
147 # encode third letter ('aux' -> 'au~78')
147 # encode third letter ('aux' -> 'au~78')
148 ec = "~%02x" % ord(n[2])
148 ec = "~%02x" % ord(n[2])
149 n = n[0:2] + ec + n[3:]
149 n = n[0:2] + ec + n[3:]
150 if n[-1] in '. ':
150 if n[-1] in '. ':
151 # encode last period or space ('foo...' -> 'foo..~2e')
151 # encode last period or space ('foo...' -> 'foo..~2e')
152 n = n[:-1] + "~%02x" % ord(n[-1])
152 n = n[:-1] + "~%02x" % ord(n[-1])
153 if dotencode and n[0] in '. ':
153 if dotencode and n[0] in '. ':
154 n = "~%02x" % ord(n[0]) + n[1:]
154 n = "~%02x" % ord(n[0]) + n[1:]
155 res.append(n)
155 res.append(n)
156 return '/'.join(res)
156 return '/'.join(res)
157
157
158 MAX_PATH_LEN_IN_HGSTORE = 120
158 _maxstorepathlen = 120
159 DIR_PREFIX_LEN = 8
159 _dirprefixlen = 8
160 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
160 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
161 def _hybridencode(path, auxencode):
161 def _hybridencode(path, auxencode):
162 '''encodes path with a length limit
162 '''encodes path with a length limit
163
163
164 Encodes all paths that begin with 'data/', according to the following.
164 Encodes all paths that begin with 'data/', according to the following.
165
165
166 Default encoding (reversible):
166 Default encoding (reversible):
167
167
168 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
168 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
169 characters are encoded as '~xx', where xx is the two digit hex code
169 characters are encoded as '~xx', where xx is the two digit hex code
170 of the character (see encodefilename).
170 of the character (see encodefilename).
171 Relevant path components consisting of Windows reserved filenames are
171 Relevant path components consisting of Windows reserved filenames are
172 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
172 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
173
173
174 Hashed encoding (not reversible):
174 Hashed encoding (not reversible):
175
175
176 If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
176 If the default-encoded path is longer than _maxstorepathlen, a
177 non-reversible hybrid hashing of the path is done instead.
177 non-reversible hybrid hashing of the path is done instead.
178 This encoding uses up to DIR_PREFIX_LEN characters of all directory
178 This encoding uses up to _dirprefixlen characters of all directory
179 levels of the lowerencoded path, but not more levels than can fit into
179 levels of the lowerencoded path, but not more levels than can fit into
180 _MAX_SHORTENED_DIRS_LEN.
180 _maxshortdirslen.
181 Then follows the filler followed by the sha digest of the full path.
181 Then follows the filler followed by the sha digest of the full path.
182 The filler is the beginning of the basename of the lowerencoded path
182 The filler is the beginning of the basename of the lowerencoded path
183 (the basename is everything after the last path separator). The filler
183 (the basename is everything after the last path separator). The filler
184 is as long as possible, filling in characters from the basename until
184 is as long as possible, filling in characters from the basename until
185 the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
185 the encoded path has _maxstorepathlen characters (or all chars of the
186 of the basename have been taken).
186 basename have been taken).
187 The extension (e.g. '.i' or '.d') is preserved.
187 The extension (e.g. '.i' or '.d') is preserved.
188
188
189 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
189 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
190 encoding was used.
190 encoding was used.
191 '''
191 '''
192 if not path.startswith('data/'):
192 if not path.startswith('data/'):
193 return path
193 return path
194 # escape directories ending with .i and .d
194 # escape directories ending with .i and .d
195 path = encodedir(path)
195 path = encodedir(path)
196 ndpath = path[len('data/'):]
196 ndpath = path[len('data/'):]
197 res = 'data/' + auxencode(encodefilename(ndpath))
197 res = 'data/' + auxencode(encodefilename(ndpath))
198 if len(res) > MAX_PATH_LEN_IN_HGSTORE:
198 if len(res) > _maxstorepathlen:
199 digest = _sha(path).hexdigest()
199 digest = _sha(path).hexdigest()
200 aep = auxencode(lowerencode(ndpath))
200 aep = auxencode(lowerencode(ndpath))
201 _root, ext = os.path.splitext(aep)
201 _root, ext = os.path.splitext(aep)
202 parts = aep.split('/')
202 parts = aep.split('/')
203 basename = parts[-1]
203 basename = parts[-1]
204 sdirs = []
204 sdirs = []
205 for p in parts[:-1]:
205 for p in parts[:-1]:
206 d = p[:DIR_PREFIX_LEN]
206 d = p[:_dirprefixlen]
207 if d[-1] in '. ':
207 if d[-1] in '. ':
208 # Windows can't access dirs ending in period or space
208 # Windows can't access dirs ending in period or space
209 d = d[:-1] + '_'
209 d = d[:-1] + '_'
210 t = '/'.join(sdirs) + '/' + d
210 t = '/'.join(sdirs) + '/' + d
211 if len(t) > _MAX_SHORTENED_DIRS_LEN:
211 if len(t) > _maxshortdirslen:
212 break
212 break
213 sdirs.append(d)
213 sdirs.append(d)
214 dirs = '/'.join(sdirs)
214 dirs = '/'.join(sdirs)
215 if len(dirs) > 0:
215 if len(dirs) > 0:
216 dirs += '/'
216 dirs += '/'
217 res = 'dh/' + dirs + digest + ext
217 res = 'dh/' + dirs + digest + ext
218 space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
218 spaceleft = _maxstorepathlen - len(res)
219 if space_left > 0:
219 if spaceleft > 0:
220 filler = basename[:space_left]
220 filler = basename[:spaceleft]
221 res = 'dh/' + dirs + filler + digest + ext
221 res = 'dh/' + dirs + filler + digest + ext
222 return res
222 return res
223
223
224 def _calcmode(path):
224 def _calcmode(path):
225 try:
225 try:
226 # files in .hg/ will be created using this mode
226 # files in .hg/ will be created using this mode
227 mode = os.stat(path).st_mode
227 mode = os.stat(path).st_mode
228 # avoid some useless chmods
228 # avoid some useless chmods
229 if (0777 & ~util.umask) == (0777 & mode):
229 if (0777 & ~util.umask) == (0777 & mode):
230 mode = None
230 mode = None
231 except OSError:
231 except OSError:
232 mode = None
232 mode = None
233 return mode
233 return mode
234
234
235 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
235 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
236
236
237 class basicstore(object):
237 class basicstore(object):
238 '''base class for local repository stores'''
238 '''base class for local repository stores'''
239 def __init__(self, path, openertype):
239 def __init__(self, path, openertype):
240 self.path = path
240 self.path = path
241 self.createmode = _calcmode(path)
241 self.createmode = _calcmode(path)
242 op = openertype(self.path)
242 op = openertype(self.path)
243 op.createmode = self.createmode
243 op.createmode = self.createmode
244 self.opener = scmutil.filteropener(op, encodedir)
244 self.opener = scmutil.filteropener(op, encodedir)
245
245
246 def join(self, f):
246 def join(self, f):
247 return self.path + '/' + encodedir(f)
247 return self.path + '/' + encodedir(f)
248
248
249 def _walk(self, relpath, recurse):
249 def _walk(self, relpath, recurse):
250 '''yields (unencoded, encoded, size)'''
250 '''yields (unencoded, encoded, size)'''
251 path = self.path
251 path = self.path
252 if relpath:
252 if relpath:
253 path += '/' + relpath
253 path += '/' + relpath
254 striplen = len(self.path) + 1
254 striplen = len(self.path) + 1
255 l = []
255 l = []
256 if os.path.isdir(path):
256 if os.path.isdir(path):
257 visit = [path]
257 visit = [path]
258 while visit:
258 while visit:
259 p = visit.pop()
259 p = visit.pop()
260 for f, kind, st in osutil.listdir(p, stat=True):
260 for f, kind, st in osutil.listdir(p, stat=True):
261 fp = p + '/' + f
261 fp = p + '/' + f
262 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
262 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
263 n = util.pconvert(fp[striplen:])
263 n = util.pconvert(fp[striplen:])
264 l.append((decodedir(n), n, st.st_size))
264 l.append((decodedir(n), n, st.st_size))
265 elif kind == stat.S_IFDIR and recurse:
265 elif kind == stat.S_IFDIR and recurse:
266 visit.append(fp)
266 visit.append(fp)
267 return sorted(l)
267 return sorted(l)
268
268
269 def datafiles(self):
269 def datafiles(self):
270 return self._walk('data', True)
270 return self._walk('data', True)
271
271
272 def walk(self):
272 def walk(self):
273 '''yields (unencoded, encoded, size)'''
273 '''yields (unencoded, encoded, size)'''
274 # yield data files first
274 # yield data files first
275 for x in self.datafiles():
275 for x in self.datafiles():
276 yield x
276 yield x
277 # yield manifest before changelog
277 # yield manifest before changelog
278 for x in reversed(self._walk('', False)):
278 for x in reversed(self._walk('', False)):
279 yield x
279 yield x
280
280
281 def copylist(self):
281 def copylist(self):
282 return ['requires'] + _data.split()
282 return ['requires'] + _data.split()
283
283
284 def write(self):
284 def write(self):
285 pass
285 pass
286
286
287 class encodedstore(basicstore):
287 class encodedstore(basicstore):
288 def __init__(self, path, openertype):
288 def __init__(self, path, openertype):
289 self.path = path + '/store'
289 self.path = path + '/store'
290 self.createmode = _calcmode(self.path)
290 self.createmode = _calcmode(self.path)
291 op = openertype(self.path)
291 op = openertype(self.path)
292 op.createmode = self.createmode
292 op.createmode = self.createmode
293 self.opener = scmutil.filteropener(op, encodefilename)
293 self.opener = scmutil.filteropener(op, encodefilename)
294
294
295 def datafiles(self):
295 def datafiles(self):
296 for a, b, size in self._walk('data', True):
296 for a, b, size in self._walk('data', True):
297 try:
297 try:
298 a = decodefilename(a)
298 a = decodefilename(a)
299 except KeyError:
299 except KeyError:
300 a = None
300 a = None
301 yield a, b, size
301 yield a, b, size
302
302
303 def join(self, f):
303 def join(self, f):
304 return self.path + '/' + encodefilename(f)
304 return self.path + '/' + encodefilename(f)
305
305
306 def copylist(self):
306 def copylist(self):
307 return (['requires', '00changelog.i'] +
307 return (['requires', '00changelog.i'] +
308 ['store/' + f for f in _data.split()])
308 ['store/' + f for f in _data.split()])
309
309
310 class fncache(object):
310 class fncache(object):
311 # the filename used to be partially encoded
311 # the filename used to be partially encoded
312 # hence the encodedir/decodedir dance
312 # hence the encodedir/decodedir dance
313 def __init__(self, opener):
313 def __init__(self, opener):
314 self.opener = opener
314 self.opener = opener
315 self.entries = None
315 self.entries = None
316 self._dirty = False
316 self._dirty = False
317
317
318 def _load(self):
318 def _load(self):
319 '''fill the entries from the fncache file'''
319 '''fill the entries from the fncache file'''
320 self.entries = set()
320 self.entries = set()
321 self._dirty = False
321 self._dirty = False
322 try:
322 try:
323 fp = self.opener('fncache', mode='rb')
323 fp = self.opener('fncache', mode='rb')
324 except IOError:
324 except IOError:
325 # skip nonexistent file
325 # skip nonexistent file
326 return
326 return
327 for n, line in enumerate(fp):
327 for n, line in enumerate(fp):
328 if (len(line) < 2) or (line[-1] != '\n'):
328 if (len(line) < 2) or (line[-1] != '\n'):
329 t = _('invalid entry in fncache, line %s') % (n + 1)
329 t = _('invalid entry in fncache, line %s') % (n + 1)
330 raise util.Abort(t)
330 raise util.Abort(t)
331 self.entries.add(decodedir(line[:-1]))
331 self.entries.add(decodedir(line[:-1]))
332 fp.close()
332 fp.close()
333
333
334 def rewrite(self, files):
334 def rewrite(self, files):
335 fp = self.opener('fncache', mode='wb')
335 fp = self.opener('fncache', mode='wb')
336 for p in files:
336 for p in files:
337 fp.write(encodedir(p) + '\n')
337 fp.write(encodedir(p) + '\n')
338 fp.close()
338 fp.close()
339 self.entries = set(files)
339 self.entries = set(files)
340 self._dirty = False
340 self._dirty = False
341
341
342 def write(self):
342 def write(self):
343 if not self._dirty:
343 if not self._dirty:
344 return
344 return
345 fp = self.opener('fncache', mode='wb', atomictemp=True)
345 fp = self.opener('fncache', mode='wb', atomictemp=True)
346 for p in self.entries:
346 for p in self.entries:
347 fp.write(encodedir(p) + '\n')
347 fp.write(encodedir(p) + '\n')
348 fp.rename()
348 fp.rename()
349 self._dirty = False
349 self._dirty = False
350
350
351 def add(self, fn):
351 def add(self, fn):
352 if self.entries is None:
352 if self.entries is None:
353 self._load()
353 self._load()
354 if fn not in self.entries:
354 if fn not in self.entries:
355 self._dirty = True
355 self._dirty = True
356 self.entries.add(fn)
356 self.entries.add(fn)
357
357
358 def __contains__(self, fn):
358 def __contains__(self, fn):
359 if self.entries is None:
359 if self.entries is None:
360 self._load()
360 self._load()
361 return fn in self.entries
361 return fn in self.entries
362
362
363 def __iter__(self):
363 def __iter__(self):
364 if self.entries is None:
364 if self.entries is None:
365 self._load()
365 self._load()
366 return iter(self.entries)
366 return iter(self.entries)
367
367
368 class _fncacheopener(scmutil.abstractopener):
368 class _fncacheopener(scmutil.abstractopener):
369 def __init__(self, op, fnc, encode):
369 def __init__(self, op, fnc, encode):
370 self.opener = op
370 self.opener = op
371 self.fncache = fnc
371 self.fncache = fnc
372 self.encode = encode
372 self.encode = encode
373
373
374 def __call__(self, path, mode='r', *args, **kw):
374 def __call__(self, path, mode='r', *args, **kw):
375 if mode not in ('r', 'rb') and path.startswith('data/'):
375 if mode not in ('r', 'rb') and path.startswith('data/'):
376 self.fncache.add(path)
376 self.fncache.add(path)
377 return self.opener(self.encode(path), mode, *args, **kw)
377 return self.opener(self.encode(path), mode, *args, **kw)
378
378
379 class fncachestore(basicstore):
379 class fncachestore(basicstore):
380 def __init__(self, path, openertype, encode):
380 def __init__(self, path, openertype, encode):
381 self.encode = encode
381 self.encode = encode
382 self.path = path + '/store'
382 self.path = path + '/store'
383 self.createmode = _calcmode(self.path)
383 self.createmode = _calcmode(self.path)
384 op = openertype(self.path)
384 op = openertype(self.path)
385 op.createmode = self.createmode
385 op.createmode = self.createmode
386 fnc = fncache(op)
386 fnc = fncache(op)
387 self.fncache = fnc
387 self.fncache = fnc
388 self.opener = _fncacheopener(op, fnc, encode)
388 self.opener = _fncacheopener(op, fnc, encode)
389
389
390 def join(self, f):
390 def join(self, f):
391 return self.path + '/' + self.encode(f)
391 return self.path + '/' + self.encode(f)
392
392
393 def datafiles(self):
393 def datafiles(self):
394 rewrite = False
394 rewrite = False
395 existing = []
395 existing = []
396 spath = self.path
396 spath = self.path
397 for f in self.fncache:
397 for f in self.fncache:
398 ef = self.encode(f)
398 ef = self.encode(f)
399 try:
399 try:
400 st = os.stat(spath + '/' + ef)
400 st = os.stat(spath + '/' + ef)
401 yield f, ef, st.st_size
401 yield f, ef, st.st_size
402 existing.append(f)
402 existing.append(f)
403 except OSError:
403 except OSError:
404 # nonexistent entry
404 # nonexistent entry
405 rewrite = True
405 rewrite = True
406 if rewrite:
406 if rewrite:
407 # rewrite fncache to remove nonexistent entries
407 # rewrite fncache to remove nonexistent entries
408 # (may be caused by rollback / strip)
408 # (may be caused by rollback / strip)
409 self.fncache.rewrite(existing)
409 self.fncache.rewrite(existing)
410
410
411 def copylist(self):
411 def copylist(self):
412 d = ('data dh fncache'
412 d = ('data dh fncache'
413 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
413 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
414 return (['requires', '00changelog.i'] +
414 return (['requires', '00changelog.i'] +
415 ['store/' + f for f in d.split()])
415 ['store/' + f for f in d.split()])
416
416
417 def write(self):
417 def write(self):
418 self.fncache.write()
418 self.fncache.write()
419
419
420 def store(requirements, path, openertype):
420 def store(requirements, path, openertype):
421 if 'store' in requirements:
421 if 'store' in requirements:
422 if 'fncache' in requirements:
422 if 'fncache' in requirements:
423 auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
423 auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
424 encode = lambda f: _hybridencode(f, auxencode)
424 encode = lambda f: _hybridencode(f, auxencode)
425 return fncachestore(path, openertype, encode)
425 return fncachestore(path, openertype, encode)
426 return encodedstore(path, openertype)
426 return encodedstore(path, openertype)
427 return basicstore(path, openertype)
427 return basicstore(path, openertype)
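
The hashed encoding described in the _hybridencode docstring above only kicks in once the default-encoded name grows past _maxstorepathlen (120 characters). Below is a minimal Python 2 sketch of that behaviour, assuming mercurial.store from this revision is importable; the sample paths are made up, and the _auxencode/_hybridencode wiring simply mirrors the lambdas built in store() at the end of the file.

# Sketch only: exercise _hybridencode() the way store() wires it up,
# with the 'dotencode' requirement enabled. Paths are illustrative.
from mercurial import store

auxencode = lambda f: store._auxencode(f, True)
hybrid = lambda f: store._hybridencode(f, auxencode)

print hybrid('data/foo.i')                    # short path: reversible 'data/...' form
long_path = 'data/' + 'a' * 200 + '.i'        # default encoding would exceed 120 chars
print hybrid(long_path).startswith('dh/')     # True: hashed, non-reversible 'dh/' form
print len(hybrid(long_path)) <= store._maxstorepathlen   # filler keeps it within the limit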
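
For context, here is a hedged sketch of how a caller picks a store implementation through the store() factory at the bottom of the file. Using scmutil.opener as the opener type and passing the repository's '.hg' path are assumptions about the caller (localrepo), not something this changeset defines.

# Sketch only: which store class store() returns for a given requirements set.
from mercurial import scmutil, store

def pick_store(hgpath, requirements):
    # 'store' + 'fncache' -> fncachestore (optionally honouring 'dotencode'),
    # 'store' alone -> encodedstore, neither -> old-style basicstore.
    return store.store(requirements, hgpath, scmutil.opener)

s = pick_store('/tmp/repo/.hg', set(['store', 'fncache', 'dotencode']))
print s.__class__.__name__        # fncachestore
print s.join('data/Foo.TXT.i')    # encoded path under .../store/data/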