store: use fast C implementation of encodedir() if it's available...
Adrian Buehlmann
r17607:cc58dc47 default
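The patch renames the pure-Python implementation to _encodedir and binds the module-level name encodedir to the C version exported by the parsers extension when that attribute exists, falling back to the Python code otherwise. A minimal standalone sketch of the selection pattern (the empty parsers class below is a hypothetical stand-in for the optional C extension module; the real code imports Mercurial's parsers):

def _encodedir(path):
    # pure-Python fallback, same transformation as in the diff below
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

class parsers(object):
    # stand-in for the optional C extension; it may or may not define encodedir
    pass

# Pick the C implementation if the extension provides it, else the fallback.
encodedir = getattr(parsers, 'encodedir', _encodedir)

print encodedir('data/foo.i/bla.i')   # -> 'data/foo.i.hg/bla.i'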
@@ -1,461 +1,463 @@
# store.py - repository store handling for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
-import osutil, scmutil, util
+import osutil, scmutil, util, parsers
import os, stat, errno

_sha = util.sha1

# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
-def encodedir(path):
+def _encodedir(path):
    '''
-    >>> encodedir('data/foo.i')
+    >>> _encodedir('data/foo.i')
    'data/foo.i'
-    >>> encodedir('data/foo.i/bla.i')
+    >>> _encodedir('data/foo.i/bla.i')
    'data/foo.i.hg/bla.i'
-    >>> encodedir('data/foo.i.hg/bla.i')
+    >>> _encodedir('data/foo.i.hg/bla.i')
    'data/foo.i.hg.hg/bla.i'
-    >>> encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
+    >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
    'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
    '''
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

+encodedir = getattr(parsers, 'encodedir', _encodedir)
+
def decodedir(path):
    '''
    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))

def _buildencodefun():
    '''
    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
            lambda s: decodedir("".join(list(decode(s)))))

encodefilename, decodefilename = _buildencodefun()

def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f('nothing/special.txt')
    'nothing/special.txt'
    >>> f('HELLO')
    'hello'
    >>> f('hello:world?')
    'hello~3aworld~3f'
    >>> f('the\x07quick\xADshot')
    'the~07quick~adshot'
    '''
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1):
        cmap[chr(x)] = chr(x).lower()
    return lambda s: "".join([cmap[c] for c in s])

lowerencode = _buildlowerencodefun()

# Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
_winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
def _auxencode(path, dotencode):
    '''
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split('/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split('/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode(['foo. '], True)
    ['foo.~20']
    >>> _auxencode([' .foo'], True)
    ['~20.foo']
    '''
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in '. ':
            n = "~%02x" % ord(n[0]) + n[1:]
            path[i] = n
        else:
            l = n.find('.')
            if l == -1:
                l = len(n)
            if ((l == 3 and n[:3] in _winres3) or
                (l == 4 and n[3] <= '9' and n[3] >= '1'
                        and n[:3] in _winres4)):
                # encode third letter ('aux' -> 'au~78')
                ec = "~%02x" % ord(n[2])
                n = n[0:2] + ec + n[3:]
                path[i] = n
        if n[-1] in '. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + "~%02x" % ord(n[-1])
    return path

_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    ef = encodefilename(path).split('/')
    res = '/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        path = encodedir(path)
        digest = _sha(path).hexdigest()
        le = lowerencode(path).split('/')[1:]
        parts = _auxencode(le, dotencode)
        basename = parts[-1]
        _root, ext = os.path.splitext(basename)
        sdirs = []
        sdirslen = 0
        for p in parts[:-1]:
            d = p[:_dirprefixlen]
            if d[-1] in '. ':
                # Windows can't access dirs ending in period or space
                d = d[:-1] + '_'
            if sdirslen == 0:
                t = len(d)
            else:
                t = sdirslen + 1 + len(d)
                if t > _maxshortdirslen:
                    break
            sdirs.append(d)
            sdirslen = t
        dirs = '/'.join(sdirs)
        if len(dirs) > 0:
            dirs += '/'
        res = 'dh/' + dirs + digest + ext
        spaceleft = _maxstorepathlen - len(res)
        if spaceleft > 0:
            filler = basename[:spaceleft]
            res = 'dh/' + dirs + filler + digest + ext
    return res

def _calcmode(path):
    try:
        # files in .hg/ will be created using this mode
        mode = os.stat(path).st_mode
        # avoid some useless chmods
        if (0777 & ~util.umask) == (0777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode

_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')

class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, openertype):
        self.path = path
        self.createmode = _calcmode(path)
        op = openertype(self.path)
        op.createmode = self.createmode
        self.opener = scmutil.filteropener(op, encodedir)

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if os.path.isdir(path):
            visit = [path]
            while visit:
                p = visit.pop()
                for f, kind, st in osutil.listdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def datafiles(self):
        return self._walk('data', True)

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        # yield manifest before changelog
        for x in reversed(self._walk('', False)):
            yield x

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self):
        pass

class encodedstore(basicstore):
    def __init__(self, path, openertype):
        self.path = path + '/store'
        self.createmode = _calcmode(self.path)
        op = openertype(self.path)
        op.createmode = self.createmode
        self.opener = scmutil.filteropener(op, encodefilename)

    def datafiles(self):
        for a, b, size in self._walk('data', True):
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])

class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, opener):
        self.opener = opener
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.opener('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %s') % (n + 1)
                    raise util.Abort(t)
        fp.close()

    def _write(self, files, atomictemp):
        fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
        if files:
            fp.write(encodedir('\n'.join(files) + '\n'))
        fp.close()
        self._dirty = False

    def rewrite(self, files):
        self._write(files, False)
        self.entries = set(files)

    def write(self):
        if self._dirty:
            self._write(self.entries, True)

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)

class _fncacheopener(scmutil.abstractopener):
    def __init__(self, op, fnc, encode):
        self.opener = op
        self.fncache = fnc
        self.encode = encode

    def _getmustaudit(self):
        return self.opener.mustaudit

    def _setmustaudit(self, onoff):
        self.opener.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)

    def __call__(self, path, mode='r', *args, **kw):
        if mode not in ('r', 'rb') and path.startswith('data/'):
            self.fncache.add(path)
        return self.opener(self.encode(path), mode, *args, **kw)

def _plainhybridencode(f):
    return _hybridencode(f, False)

def _dothybridencode(f):
    return _hybridencode(f, True)

class fncachestore(basicstore):
    def __init__(self, path, openertype, dotencode):
        if dotencode:
            encode = _dothybridencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        self.path = path + '/store'
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(self.path)
        op = openertype(self.path)
        op.createmode = self.createmode
        fnc = fncache(op)
        self.fncache = fnc
        self.opener = _fncacheopener(op, fnc, encode)

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return os.stat(self.pathsep + path).st_size

    def datafiles(self):
        rewrite = False
        existing = []
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
                existing.append(f)
            except OSError, err:
                if err.errno != errno.ENOENT:
                    raise
                # nonexistent entry
                rewrite = True
        if rewrite:
            # rewrite fncache to remove nonexistent entries
            # (may be caused by rollback / strip)
            self.fncache.rewrite(existing)

    def copylist(self):
        d = ('data dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self):
        self.fncache.write()

def store(requirements, path, openertype):
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, openertype, 'dotencode' in requirements)
        return encodedstore(path, openertype)
    return basicstore(path, openertype)
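The hashed encoding described in _hybridencode's docstring can be observed directly: a path whose default encoding would exceed _maxstorepathlen (120) comes back under the 'dh/' prefix, within the length limit, and with its extension preserved. A rough sketch, assuming a Mercurial source tree of this vintage is importable as mercurial.store and that _hybridencode keeps the signature shown above; the example paths are made up for illustration:

from mercurial import store

short = 'data/some/ordinary/path.i'
long_ = 'data/' + '/'.join(['averylongdirectoryname%d' % i
                            for i in range(10)]) + '/file.i'

print store._hybridencode(short, True)        # short path: reversible default encoding
hashed = store._hybridencode(long_, True)
print hashed.startswith('dh/')                # True: hashed form replaces 'data/'
print len(hashed) <= store._maxstorepathlen   # True: result stays within the limit
print hashed.endswith('.i')                   # True: the extension is preserved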