store: remove unneeded startswith('data/') checks in encodedir() and decodedir()...
Adrian Buehlmann
r17586:2f1475da default
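For context, a minimal self-contained sketch of the directory-suffix encoding that encodedir() and decodedir() implement (an illustration, not the Mercurial source itself; the expected values are taken from the doctests in the diff below). For store paths under 'data/', the removed prefix guard was a pass-through check, so the round trip is unchanged:

# Sketch of the .i/.d/.hg directory-suffix encoding (illustration only).
def encodedir(path):
    # Suffix directory components ending in .hg, .i or .d with '.hg' so a
    # directory named 'foo.i' cannot collide with a revlog file 'foo.i'.
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

def decodedir(path):
    # Reverse the transformation; paths without '.hg/' pass through untouched.
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))

# Expected values match the doctests in the diff below.
assert encodedir('data/foo.i/bla.i') == 'data/foo.i.hg/bla.i'
assert decodedir('data/foo.i.hg/bla.i') == 'data/foo.i/bla.i'
assert decodedir(encodedir('data/foo.i.hg/bla.i')) == 'data/foo.i.hg/bla.i'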
@@ -1,445 +1,443 @@
 # store.py - repository store handling for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 from i18n import _
 import osutil, scmutil, util
 import os, stat, errno

 _sha = util.sha1

 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def encodedir(path):
     '''
     >>> encodedir('data/foo.i')
     'data/foo.i'
     >>> encodedir('data/foo.i/bla.i')
     'data/foo.i.hg/bla.i'
     >>> encodedir('data/foo.i.hg/bla.i')
     'data/foo.i.hg.hg/bla.i'
     '''
-    if not path.startswith('data/'):
-        return path
     return (path
             .replace(".hg/", ".hg.hg/")
             .replace(".i/", ".i.hg/")
             .replace(".d/", ".d.hg/"))

 def decodedir(path):
     '''
     >>> decodedir('data/foo.i')
     'data/foo.i'
     >>> decodedir('data/foo.i.hg/bla.i')
     'data/foo.i/bla.i'
     >>> decodedir('data/foo.i.hg.hg/bla.i')
     'data/foo.i.hg/bla.i'
     '''
-    if not path.startswith('data/') or ".hg/" not in path:
+    if ".hg/" not in path:
         return path
     return (path
             .replace(".d.hg/", ".d/")
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))

 def _buildencodefun():
     '''
     >>> enc, dec = _buildencodefun()

     >>> enc('nothing/special.txt')
     'nothing/special.txt'
     >>> dec('nothing/special.txt')
     'nothing/special.txt'

     >>> enc('HELLO')
     '_h_e_l_l_o'
     >>> dec('_h_e_l_l_o')
     'HELLO'

     >>> enc('hello:world?')
     'hello~3aworld~3f'
     >>> dec('hello~3aworld~3f')
     'hello:world?'

     >>> enc('the\x07quick\xADshot')
     'the~07quick~adshot'
     >>> dec('the~07quick~adshot')
     'the\\x07quick\\xadshot'
     '''
     e = '_'
     winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + winreserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
         dmap[v] = k
     def decode(s):
         i = 0
         while i < len(s):
             for l in xrange(1, 4):
                 try:
                     yield dmap[s[i:i + l]]
                     i += l
                     break
                 except KeyError:
                     pass
             else:
                 raise KeyError
     return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
             lambda s: decodedir("".join(list(decode(s)))))

 encodefilename, decodefilename = _buildencodefun()

 def _buildlowerencodefun():
     '''
     >>> f = _buildlowerencodefun()
     >>> f('nothing/special.txt')
     'nothing/special.txt'
     >>> f('HELLO')
     'hello'
     >>> f('hello:world?')
     'hello~3aworld~3f'
     >>> f('the\x07quick\xADshot')
     'the~07quick~adshot'
     '''
     winreserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + winreserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1):
         cmap[chr(x)] = chr(x).lower()
     return lambda s: "".join([cmap[c] for c in s])

 lowerencode = _buildlowerencodefun()

 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
 def _auxencode(path, dotencode):
     '''
     Encodes filenames containing names reserved by Windows or which end in
     period or space. Does not touch other single reserved characters c.
     Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
     Additionally encodes space or period at the beginning, if dotencode is
     True. Parameter path is assumed to be all lowercase.
     A segment only needs encoding if a reserved name appears as a
     basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
     doesn't need encoding.

     >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
     ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
     >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.', False)
     ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
     >>> _auxencode('foo. ', True)
     ['foo.~20']
     >>> _auxencode(' .foo', True)
     ['~20.foo']
     '''
     res = path.split('/')
     for i, n in enumerate(res):
         if not n:
             continue
         if dotencode and n[0] in '. ':
             n = "~%02x" % ord(n[0]) + n[1:]
             res[i] = n
         else:
             l = n.find('.')
             if l == -1:
                 l = len(n)
             if ((l == 3 and n[:3] in _winres3) or
                 (l == 4 and n[3] <= '9' and n[3] >= '1'
                  and n[:3] in _winres4)):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2])
                 n = n[0:2] + ec + n[3:]
                 res[i] = n
         if n[-1] in '. ':
             # encode last period or space ('foo...' -> 'foo..~2e')
             res[i] = n[:-1] + "~%02x" % ord(n[-1])
     return res

 _maxstorepathlen = 120
 _dirprefixlen = 8
 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
 def _hybridencode(path, auxencode):
     '''encodes path with a length limit

     Encodes all paths that begin with 'data/', according to the following.

     Default encoding (reversible):

     Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
     characters are encoded as '~xx', where xx is the two digit hex code
     of the character (see encodefilename).
     Relevant path components consisting of Windows reserved filenames are
     masked by encoding the third character ('aux' -> 'au~78', see auxencode).

     Hashed encoding (not reversible):

     If the default-encoded path is longer than _maxstorepathlen, a
     non-reversible hybrid hashing of the path is done instead.
     This encoding uses up to _dirprefixlen characters of all directory
     levels of the lowerencoded path, but not more levels than can fit into
     _maxshortdirslen.
     Then follows the filler followed by the sha digest of the full path.
     The filler is the beginning of the basename of the lowerencoded path
     (the basename is everything after the last path separator). The filler
     is as long as possible, filling in characters from the basename until
     the encoded path has _maxstorepathlen characters (or all chars of the
     basename have been taken).
     The extension (e.g. '.i' or '.d') is preserved.

     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
     '''
     res = '/'.join(auxencode(encodefilename(path)))
     if len(res) > _maxstorepathlen:
         path = encodedir(path)
         digest = _sha(path).hexdigest()
         parts = auxencode(lowerencode(path))[1:]
         _root, ext = os.path.splitext(parts[-1])
         basename = parts[-1]
         sdirs = []
         for p in parts[:-1]:
             d = p[:_dirprefixlen]
             if d[-1] in '. ':
                 # Windows can't access dirs ending in period or space
                 d = d[:-1] + '_'
             t = '/'.join(sdirs) + '/' + d
             if len(t) > _maxshortdirslen:
                 break
             sdirs.append(d)
         dirs = '/'.join(sdirs)
         if len(dirs) > 0:
             dirs += '/'
         res = 'dh/' + dirs + digest + ext
         spaceleft = _maxstorepathlen - len(res)
         if spaceleft > 0:
             filler = basename[:spaceleft]
             res = 'dh/' + dirs + filler + digest + ext
     return res

 def _calcmode(path):
     try:
         # files in .hg/ will be created using this mode
         mode = os.stat(path).st_mode
         # avoid some useless chmods
         if (0777 & ~util.umask) == (0777 & mode):
             mode = None
     except OSError:
         mode = None
     return mode

 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
          ' phaseroots obsstore')

 class basicstore(object):
     '''base class for local repository stores'''
     def __init__(self, path, openertype):
         self.path = path
         self.createmode = _calcmode(path)
         op = openertype(self.path)
         op.createmode = self.createmode
         self.opener = scmutil.filteropener(op, encodedir)

     def join(self, f):
         return self.path + '/' + encodedir(f)

     def _walk(self, relpath, recurse):
         '''yields (unencoded, encoded, size)'''
         path = self.path
         if relpath:
             path += '/' + relpath
         striplen = len(self.path) + 1
         l = []
         if os.path.isdir(path):
             visit = [path]
             while visit:
                 p = visit.pop()
                 for f, kind, st in osutil.listdir(p, stat=True):
                     fp = p + '/' + f
                     if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                         n = util.pconvert(fp[striplen:])
                         l.append((decodedir(n), n, st.st_size))
                     elif kind == stat.S_IFDIR and recurse:
                         visit.append(fp)
         l.sort()
         return l

     def datafiles(self):
         return self._walk('data', True)

     def walk(self):
         '''yields (unencoded, encoded, size)'''
         # yield data files first
         for x in self.datafiles():
             yield x
         # yield manifest before changelog
         for x in reversed(self._walk('', False)):
             yield x

     def copylist(self):
         return ['requires'] + _data.split()

     def write(self):
         pass

 class encodedstore(basicstore):
     def __init__(self, path, openertype):
         self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = openertype(self.path)
         op.createmode = self.createmode
         self.opener = scmutil.filteropener(op, encodefilename)

     def datafiles(self):
         for a, b, size in self._walk('data', True):
             try:
                 a = decodefilename(a)
             except KeyError:
                 a = None
             yield a, b, size

     def join(self, f):
         return self.path + '/' + encodefilename(f)

     def copylist(self):
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in _data.split()])

 class fncache(object):
     # the filename used to be partially encoded
     # hence the encodedir/decodedir dance
     def __init__(self, opener):
         self.opener = opener
         self.entries = None
         self._dirty = False

     def _load(self):
         '''fill the entries from the fncache file'''
         self._dirty = False
         try:
             fp = self.opener('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
             self.entries = set()
             return
         self.entries = set(map(decodedir, fp.read().splitlines()))
         if '' in self.entries:
             fp.seek(0)
             for n, line in enumerate(fp):
                 if not line.rstrip('\n'):
                     t = _('invalid entry in fncache, line %s') % (n + 1)
                     raise util.Abort(t)
         fp.close()

     def _write(self, files, atomictemp):
         fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
         if files:
             fp.write('\n'.join(map(encodedir, files)) + '\n')
         fp.close()
         self._dirty = False

     def rewrite(self, files):
         self._write(files, False)
         self.entries = set(files)

     def write(self):
         if self._dirty:
             self._write(self.entries, True)

     def add(self, fn):
         if self.entries is None:
             self._load()
         if fn not in self.entries:
             self._dirty = True
             self.entries.add(fn)

     def __contains__(self, fn):
         if self.entries is None:
             self._load()
         return fn in self.entries

     def __iter__(self):
         if self.entries is None:
             self._load()
         return iter(self.entries)

 class _fncacheopener(scmutil.abstractopener):
     def __init__(self, op, fnc, encode):
         self.opener = op
         self.fncache = fnc
         self.encode = encode

     def _getmustaudit(self):
         return self.opener.mustaudit

     def _setmustaudit(self, onoff):
         self.opener.mustaudit = onoff

     mustaudit = property(_getmustaudit, _setmustaudit)

     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb') and path.startswith('data/'):
             self.fncache.add(path)
         return self.opener(self.encode(path), mode, *args, **kw)

 class fncachestore(basicstore):
     def __init__(self, path, openertype, encode):
         self.encode = encode
         self.path = path + '/store'
         self.pathsep = self.path + '/'
         self.createmode = _calcmode(self.path)
         op = openertype(self.path)
         op.createmode = self.createmode
         fnc = fncache(op)
         self.fncache = fnc
         self.opener = _fncacheopener(op, fnc, encode)

     def join(self, f):
         return self.pathsep + self.encode(f)

     def getsize(self, path):
         return os.stat(self.pathsep + path).st_size

     def datafiles(self):
         rewrite = False
         existing = []
         for f in sorted(self.fncache):
             ef = self.encode(f)
             try:
                 yield f, ef, self.getsize(ef)
                 existing.append(f)
             except OSError, err:
                 if err.errno != errno.ENOENT:
                     raise
                 # nonexistent entry
                 rewrite = True
         if rewrite:
             # rewrite fncache to remove nonexistent entries
             # (may be caused by rollback / strip)
             self.fncache.rewrite(existing)

     def copylist(self):
         d = ('data dh fncache phaseroots obsstore'
              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in d.split()])

     def write(self):
         self.fncache.write()

 def store(requirements, path, openertype):
     if 'store' in requirements:
         if 'fncache' in requirements:
             auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
             encode = lambda f: _hybridencode(f, auxencode)
             return fncachestore(path, openertype, encode)
         return encodedstore(path, openertype)
     return basicstore(path, openertype)
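The store() factory at the end of the diff selects the store layout from the repository requirements. A small sketch of that selection logic, for illustration only (pick_store and its return strings are hypothetical names, not part of Mercurial):

# Mirrors the branching in store() above: requirements decide the layout.
def pick_store(requirements):
    if 'store' not in requirements:
        return 'basicstore'    # old repositories without the 'store' requirement
    if 'fncache' not in requirements:
        return 'encodedstore'  # filename-encoded layout under .hg/store
    # fncache layout; 'dotencode' additionally escapes a leading '.' or ' '
    return 'fncachestore'

assert pick_store(set()) == 'basicstore'
assert pick_store({'store'}) == 'encodedstore'
assert pick_store({'store', 'fncache', 'dotencode'}) == 'fncachestore'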