store: reduce string concatenation when joining...
Bryan O'Sullivan
r17562:b42b0729 default
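The change below caches the store prefix `self.path + '/'` once as `self.pathsep` in `fncachestore.__init__`, so `join()` and `getsize()` perform a single string concatenation per call instead of two. A minimal standalone sketch of the same pattern (the class and method names here are illustrative only, not Mercurial's actual store API):

    class cachedprefix(object):
        """Illustrative sketch: precompute the '/'-terminated prefix once."""

        def __init__(self, path):
            self.path = path + '/store'
            # computed once at construction time, reused on every join
            self.pathsep = self.path + '/'

        def join_before(self, f):
            # old form: builds an intermediate string on every call
            return self.path + '/' + f

        def join_after(self, f):
            # new form: one concatenation per call
            return self.pathsep + f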
@@ -1,440 +1,441 @@
 # store.py - repository store handling for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
 import osutil, scmutil, util
 import os, stat, errno
 
 _sha = util.sha1
 
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def encodedir(path):
     '''
     >>> encodedir('data/foo.i')
     'data/foo.i'
     >>> encodedir('data/foo.i/bla.i')
     'data/foo.i.hg/bla.i'
     >>> encodedir('data/foo.i.hg/bla.i')
     'data/foo.i.hg.hg/bla.i'
     '''
     if not path.startswith('data/'):
         return path
     return (path
             .replace(".hg/", ".hg.hg/")
             .replace(".i/", ".i.hg/")
             .replace(".d/", ".d.hg/"))
 
 def decodedir(path):
     '''
     >>> decodedir('data/foo.i')
     'data/foo.i'
     >>> decodedir('data/foo.i.hg/bla.i')
     'data/foo.i/bla.i'
     >>> decodedir('data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
     '''
     if not path.startswith('data/') or ".hg/" not in path:
         return path
     return (path
             .replace(".d.hg/", ".d/")
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))
 
 def _buildencodefun():
     '''
     >>> enc, dec = _buildencodefun()
 
     >>> enc('nothing/special.txt')
     'nothing/special.txt'
     >>> dec('nothing/special.txt')
     'nothing/special.txt'
 
     >>> enc('HELLO')
     '_h_e_l_l_o'
     >>> dec('_h_e_l_l_o')
     'HELLO'
 
     >>> enc('hello:world?')
     'hello~3aworld~3f'
     >>> dec('hello~3aworld~3f')
     'hello:world?'
 
     >>> enc('the\x07quick\xADshot')
     'the~07quick~adshot'
     >>> dec('the~07quick~adshot')
     'the\\x07quick\\xadshot'
     '''
     e = '_'
     winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + winreserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
         dmap[v] = k
     def decode(s):
         i = 0
         while i < len(s):
             for l in xrange(1, 4):
                 try:
                     yield dmap[s[i:i + l]]
                     i += l
                     break
                 except KeyError:
                     pass
             else:
                 raise KeyError
     return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
             lambda s: decodedir("".join(list(decode(s)))))
 
 encodefilename, decodefilename = _buildencodefun()
 
 def _buildlowerencodefun():
     '''
     >>> f = _buildlowerencodefun()
     >>> f('nothing/special.txt')
     'nothing/special.txt'
     >>> f('HELLO')
     'hello'
     >>> f('hello:world?')
     'hello~3aworld~3f'
     >>> f('the\x07quick\xADshot')
     'the~07quick~adshot'
     '''
     winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + winreserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1):
         cmap[chr(x)] = chr(x).lower()
     return lambda s: "".join([cmap[c] for c in s])
 
 lowerencode = _buildlowerencodefun()
 
 _winreservednames = '''con prn aux nul
     com1 com2 com3 com4 com5 com6 com7 com8 com9
     lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
 def _auxencode(path, dotencode):
     '''
     Encodes filenames containing names reserved by Windows or which end in
     period or space. Does not touch other single reserved characters c.
     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
     Additionally encodes space or period at the beginning, if dotencode is
     True.
     path is assumed to be all lowercase.
 
     >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
     '~2efoo/au~78.txt/txt.aux/co~6e/pr~6e/nu~6c/foo~2e'
     >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/foo.', False)
     '.com1com2/lp~749.lpt4.lpt1/conprn/foo~2e'
     >>> _auxencode('foo. ', True)
     'foo.~20'
     >>> _auxencode(' .foo', True)
     '~20.foo'
     '''
     res = []
     for n in path.split('/'):
         if n:
             base = n.split('.')[0]
             if base and (base in _winreservednames):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2])
                 n = n[0:2] + ec + n[3:]
             if n[-1] in '. ':
                 # encode last period or space ('foo...' -> 'foo..~2e')
                 n = n[:-1] + "~%02x" % ord(n[-1])
             if dotencode and n[0] in '. ':
                 n = "~%02x" % ord(n[0]) + n[1:]
         res.append(n)
     return '/'.join(res)
 
 _maxstorepathlen = 120
 _dirprefixlen = 8
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
 def _hybridencode(path, auxencode):
     '''encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
 
     Default encoding (reversible):
 
     Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
     characters are encoded as '~xx', where xx is the two digit hex code
     of the character (see encodefilename).
     Relevant path components consisting of Windows reserved filenames are
     masked by encoding the third character ('aux' -> 'au~78', see auxencode).
 
     Hashed encoding (not reversible):
 
     If the default-encoded path is longer than _maxstorepathlen, a
     non-reversible hybrid hashing of the path is done instead.
     This encoding uses up to _dirprefixlen characters of all directory
     levels of the lowerencoded path, but not more levels than can fit into
     _maxshortdirslen.
     Then follows the filler followed by the sha digest of the full path.
     The filler is the beginning of the basename of the lowerencoded path
     (the basename is everything after the last path separator). The filler
     is as long as possible, filling in characters from the basename until
     the encoded path has _maxstorepathlen characters (or all chars of the
     basename have been taken).
     The extension (e.g. '.i' or '.d') is preserved.
 
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
     '''
     if not path.startswith('data/'):
         return path
     # escape directories ending with .i and .d
     path = encodedir(path)
     ndpath = path[len('data/'):]
     res = 'data/' + auxencode(encodefilename(ndpath))
     if len(res) > _maxstorepathlen:
         digest = _sha(path).hexdigest()
         aep = auxencode(lowerencode(ndpath))
         _root, ext = os.path.splitext(aep)
         parts = aep.split('/')
         basename = parts[-1]
         sdirs = []
         for p in parts[:-1]:
             d = p[:_dirprefixlen]
             if d[-1] in '. ':
                 # Windows can't access dirs ending in period or space
                 d = d[:-1] + '_'
             t = '/'.join(sdirs) + '/' + d
             if len(t) > _maxshortdirslen:
                 break
             sdirs.append(d)
         dirs = '/'.join(sdirs)
         if len(dirs) > 0:
             dirs += '/'
         res = 'dh/' + dirs + digest + ext
         spaceleft = _maxstorepathlen - len(res)
         if spaceleft > 0:
             filler = basename[:spaceleft]
             res = 'dh/' + dirs + filler + digest + ext
     return res
 
 def _calcmode(path):
     try:
         # files in .hg/ will be created using this mode
         mode = os.stat(path).st_mode
         # avoid some useless chmods
         if (0777 & ~util.umask) == (0777 & mode):
             mode = None
     except OSError:
         mode = None
     return mode
 
 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
          ' phaseroots obsstore')
 
 class basicstore(object):
     '''base class for local repository stores'''
     def __init__(self, path, openertype):
         self.path = path
         self.createmode = _calcmode(path)
         op = openertype(self.path)
         op.createmode = self.createmode
         self.opener = scmutil.filteropener(op, encodedir)
 
     def join(self, f):
         return self.path + '/' + encodedir(f)
 
     def _walk(self, relpath, recurse):
         '''yields (unencoded, encoded, size)'''
         path = self.path
         if relpath:
             path += '/' + relpath
         striplen = len(self.path) + 1
         l = []
         if os.path.isdir(path):
             visit = [path]
             while visit:
                 p = visit.pop()
                 for f, kind, st in osutil.listdir(p, stat=True):
                     fp = p + '/' + f
                     if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                         n = util.pconvert(fp[striplen:])
                         l.append((decodedir(n), n, st.st_size))
                     elif kind == stat.S_IFDIR and recurse:
                         visit.append(fp)
         l.sort()
         return l
 
     def datafiles(self):
         return self._walk('data', True)
 
     def walk(self):
         '''yields (unencoded, encoded, size)'''
         # yield data files first
         for x in self.datafiles():
             yield x
         # yield manifest before changelog
         for x in reversed(self._walk('', False)):
             yield x
 
     def copylist(self):
         return ['requires'] + _data.split()
 
     def write(self):
         pass
 
 class encodedstore(basicstore):
     def __init__(self, path, openertype):
         self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = openertype(self.path)
         op.createmode = self.createmode
         self.opener = scmutil.filteropener(op, encodefilename)
 
     def datafiles(self):
         for a, b, size in self._walk('data', True):
             try:
                 a = decodefilename(a)
             except KeyError:
                 a = None
             yield a, b, size
 
     def join(self, f):
         return self.path + '/' + encodefilename(f)
 
     def copylist(self):
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in _data.split()])
 
 class fncache(object):
     # the filename used to be partially encoded
     # hence the encodedir/decodedir dance
     def __init__(self, opener):
         self.opener = opener
         self.entries = None
         self._dirty = False
 
     def _load(self):
         '''fill the entries from the fncache file'''
         self._dirty = False
         try:
             fp = self.opener('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
             self.entries = set()
             return
         self.entries = set(map(decodedir, fp.read().splitlines()))
         if '' in self.entries:
             fp.seek(0)
             for n, line in enumerate(fp):
                 if not line.rstrip('\n'):
                     t = _('invalid entry in fncache, line %s') % (n + 1)
                     raise util.Abort(t)
         fp.close()
 
     def _write(self, files, atomictemp):
         fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
         if files:
             fp.write('\n'.join(map(encodedir, files)) + '\n')
         fp.close()
         self._dirty = False
 
     def rewrite(self, files):
         self._write(files, False)
         self.entries = set(files)
 
     def write(self):
         if self._dirty:
             self._write(self.entries, True)
 
     def add(self, fn):
         if self.entries is None:
             self._load()
         if fn not in self.entries:
             self._dirty = True
             self.entries.add(fn)
 
     def __contains__(self, fn):
         if self.entries is None:
             self._load()
         return fn in self.entries
 
     def __iter__(self):
         if self.entries is None:
             self._load()
         return iter(self.entries)
 
 class _fncacheopener(scmutil.abstractopener):
     def __init__(self, op, fnc, encode):
         self.opener = op
         self.fncache = fnc
         self.encode = encode
 
     def _getmustaudit(self):
         return self.opener.mustaudit
 
     def _setmustaudit(self, onoff):
         self.opener.mustaudit = onoff
 
     mustaudit = property(_getmustaudit, _setmustaudit)
 
     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb') and path.startswith('data/'):
             self.fncache.add(path)
         return self.opener(self.encode(path), mode, *args, **kw)
 
 class fncachestore(basicstore):
     def __init__(self, path, openertype, encode):
         self.encode = encode
         self.path = path + '/store'
+        self.pathsep = self.path + '/'
         self.createmode = _calcmode(self.path)
         op = openertype(self.path)
         op.createmode = self.createmode
         fnc = fncache(op)
         self.fncache = fnc
         self.opener = _fncacheopener(op, fnc, encode)
 
     def join(self, f):
-        return self.path + '/' + self.encode(f)
+        return self.pathsep + self.encode(f)
 
     def getsize(self, path):
-        return os.stat(self.path + '/' + path).st_size
+        return os.stat(self.pathsep + path).st_size
 
     def datafiles(self):
         rewrite = False
         existing = []
         for f in sorted(self.fncache):
             ef = self.encode(f)
             try:
                 yield f, ef, self.getsize(ef)
                 existing.append(f)
             except OSError, err:
                 if err.errno != errno.ENOENT:
                     raise
                 # nonexistent entry
                 rewrite = True
         if rewrite:
             # rewrite fncache to remove nonexistent entries
             # (may be caused by rollback / strip)
             self.fncache.rewrite(existing)
 
     def copylist(self):
         d = ('data dh fncache phaseroots obsstore'
              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in d.split()])
 
     def write(self):
         self.fncache.write()
 
 def store(requirements, path, openertype):
     if 'store' in requirements:
         if 'fncache' in requirements:
             auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
             encode = lambda f: _hybridencode(f, auxencode)
             return fncachestore(path, openertype, encode)
         return encodedstore(path, openertype)
     return basicstore(path, openertype)
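For a rough sense of the saving (a hypothetical micro-benchmark, not part of this changeset), the cost of one versus two concatenations per join can be compared with the standard-library timeit module:

    import timeit

    base = '.hg/store'
    pathsep = base + '/'          # precomputed once, as in fncachestore.__init__
    name = 'data/some/deeply/nested/file.i'

    # two concatenations per call (old join) vs one (new join)
    two = timeit.timeit(lambda: base + '/' + name, number=1000000)
    one = timeit.timeit(lambda: pathsep + name, number=1000000)
    print('two concatenations: %.3fs' % two)
    print('one concatenation:  %.3fs' % one)

The absolute numbers depend on the interpreter, but join() is called for every tracked file during operations such as streaming clones, so shaving a constant amount of work per call adds up.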