store: add some doctests
Adrian Buehlmann
r13949:ba43aa1e default
@@ -1,354 +1,421 @@
 # store.py - repository store handling for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
 import osutil, util
 import os, stat
 
 _sha = util.sha1
 
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def encodedir(path):
+    '''
+    >>> encodedir('data/foo.i')
+    'data/foo.i'
+    >>> encodedir('data/foo.i/bla.i')
+    'data/foo.i.hg/bla.i'
+    >>> encodedir('data/foo.i.hg/bla.i')
+    'data/foo.i.hg.hg/bla.i'
+    '''
     if not path.startswith('data/'):
         return path
     return (path
             .replace(".hg/", ".hg.hg/")
             .replace(".i/", ".i.hg/")
             .replace(".d/", ".d.hg/"))
 
 def decodedir(path):
+    '''
+    >>> decodedir('data/foo.i')
+    'data/foo.i'
+    >>> decodedir('data/foo.i.hg/bla.i')
+    'data/foo.i/bla.i'
+    >>> decodedir('data/foo.i.hg.hg/bla.i')
+    'data/foo.i.hg/bla.i'
+    '''
     if not path.startswith('data/') or ".hg/" not in path:
         return path
     return (path
             .replace(".d.hg/", ".d/")
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))
 
 def _buildencodefun():
+    '''
+    >>> enc, dec = _buildencodefun()
+
+    >>> enc('nothing/special.txt')
+    'nothing/special.txt'
+    >>> dec('nothing/special.txt')
+    'nothing/special.txt'
+
+    >>> enc('HELLO')
+    '_h_e_l_l_o'
+    >>> dec('_h_e_l_l_o')
+    'HELLO'
+
+    >>> enc('hello:world?')
+    'hello~3aworld~3f'
+    >>> dec('hello~3aworld~3f')
+    'hello:world?'
+
+    >>> enc('the\x07quick\xADshot')
+    'the~07quick~adshot'
+    >>> dec('the~07quick~adshot')
+    'the\\x07quick\\xadshot'
+    '''
     e = '_'
     win_reserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + win_reserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
         dmap[v] = k
     def decode(s):
         i = 0
         while i < len(s):
             for l in xrange(1, 4):
                 try:
                     yield dmap[s[i:i + l]]
                     i += l
                     break
                 except KeyError:
                     pass
             else:
                 raise KeyError
     return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
             lambda s: decodedir("".join(list(decode(s)))))
 
 encodefilename, decodefilename = _buildencodefun()
 
 def _build_lower_encodefun():
+    '''
+    >>> f = _build_lower_encodefun()
+    >>> f('nothing/special.txt')
+    'nothing/special.txt'
+    >>> f('HELLO')
+    'hello'
+    >>> f('hello:world?')
+    'hello~3aworld~3f'
+    >>> f('the\x07quick\xADshot')
+    'the~07quick~adshot'
+    '''
     win_reserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + win_reserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1):
         cmap[chr(x)] = chr(x).lower()
     return lambda s: "".join([cmap[c] for c in s])
 
 lowerencode = _build_lower_encodefun()
 
 _windows_reserved_filenames = '''con prn aux nul
     com1 com2 com3 com4 com5 com6 com7 com8 com9
     lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
 def _auxencode(path, dotencode):
+    '''
+    Encodes filenames containing names reserved by Windows or which end in
+    period or space. Does not touch other single reserved characters c.
+    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
+    Additionally encodes space or period at the beginning, if dotencode is
+    True.
+    path is assumed to be all lowercase.
+
+    >>> _auxencode('.foo/aux.txt/txt.aux/con/prn/nul/foo.', True)
+    '~2efoo/au~78.txt/txt.aux/co~6e/pr~6e/nu~6c/foo~2e'
+    >>> _auxencode('.com1com2/lpt9.lpt4.lpt1/conprn/foo.', False)
+    '.com1com2/lp~749.lpt4.lpt1/conprn/foo~2e'
+    >>> _auxencode('foo. ', True)
+    'foo.~20'
+    >>> _auxencode(' .foo', True)
+    '~20.foo'
+    '''
     res = []
     for n in path.split('/'):
         if n:
             base = n.split('.')[0]
             if base and (base in _windows_reserved_filenames):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2])
                 n = n[0:2] + ec + n[3:]
             if n[-1] in '. ':
                 # encode last period or space ('foo...' -> 'foo..~2e')
                 n = n[:-1] + "~%02x" % ord(n[-1])
             if dotencode and n[0] in '. ':
                 n = "~%02x" % ord(n[0]) + n[1:]
         res.append(n)
     return '/'.join(res)
 
 MAX_PATH_LEN_IN_HGSTORE = 120
 DIR_PREFIX_LEN = 8
 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
 def _hybridencode(path, auxencode):
     '''encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
 
     Default encoding (reversible):
 
     Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
     characters are encoded as '~xx', where xx is the two digit hex code
     of the character (see encodefilename).
     Relevant path components consisting of Windows reserved filenames are
     masked by encoding the third character ('aux' -> 'au~78', see auxencode).
 
     Hashed encoding (not reversible):
 
     If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
     non-reversible hybrid hashing of the path is done instead.
     This encoding uses up to DIR_PREFIX_LEN characters of all directory
     levels of the lowerencoded path, but not more levels than can fit into
     _MAX_SHORTENED_DIRS_LEN.
     Then follows the filler followed by the sha digest of the full path.
     The filler is the beginning of the basename of the lowerencoded path
     (the basename is everything after the last path separator). The filler
     is as long as possible, filling in characters from the basename until
     the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
     of the basename have been taken).
     The extension (e.g. '.i' or '.d') is preserved.
 
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
     '''
     if not path.startswith('data/'):
         return path
     # escape directories ending with .i and .d
     path = encodedir(path)
     ndpath = path[len('data/'):]
     res = 'data/' + auxencode(encodefilename(ndpath))
     if len(res) > MAX_PATH_LEN_IN_HGSTORE:
         digest = _sha(path).hexdigest()
         aep = auxencode(lowerencode(ndpath))
         _root, ext = os.path.splitext(aep)
         parts = aep.split('/')
         basename = parts[-1]
         sdirs = []
         for p in parts[:-1]:
             d = p[:DIR_PREFIX_LEN]
             if d[-1] in '. ':
                 # Windows can't access dirs ending in period or space
                 d = d[:-1] + '_'
             t = '/'.join(sdirs) + '/' + d
             if len(t) > _MAX_SHORTENED_DIRS_LEN:
                 break
             sdirs.append(d)
         dirs = '/'.join(sdirs)
         if len(dirs) > 0:
             dirs += '/'
         res = 'dh/' + dirs + digest + ext
         space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
         if space_left > 0:
             filler = basename[:space_left]
             res = 'dh/' + dirs + filler + digest + ext
     return res
 
 def _calcmode(path):
     try:
         # files in .hg/ will be created using this mode
         mode = os.stat(path).st_mode
         # avoid some useless chmods
         if (0777 & ~util.umask) == (0777 & mode):
             mode = None
     except OSError:
         mode = None
     return mode
 
 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
 
 class basicstore(object):
     '''base class for local repository stores'''
     def __init__(self, path, opener):
         self.path = path
         self.createmode = _calcmode(path)
         op = opener(self.path)
         op.createmode = self.createmode
         self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw)
 
     def join(self, f):
         return self.path + '/' + encodedir(f)
 
     def _walk(self, relpath, recurse):
         '''yields (unencoded, encoded, size)'''
         path = self.path
         if relpath:
             path += '/' + relpath
         striplen = len(self.path) + 1
         l = []
         if os.path.isdir(path):
             visit = [path]
             while visit:
                 p = visit.pop()
                 for f, kind, st in osutil.listdir(p, stat=True):
                     fp = p + '/' + f
                     if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                         n = util.pconvert(fp[striplen:])
                         l.append((decodedir(n), n, st.st_size))
                     elif kind == stat.S_IFDIR and recurse:
                         visit.append(fp)
         return sorted(l)
 
     def datafiles(self):
         return self._walk('data', True)
 
     def walk(self):
         '''yields (unencoded, encoded, size)'''
         # yield data files first
         for x in self.datafiles():
             yield x
         # yield manifest before changelog
         for x in reversed(self._walk('', False)):
             yield x
 
     def copylist(self):
         return ['requires'] + _data.split()
 
     def write(self):
         pass
 
 class encodedstore(basicstore):
     def __init__(self, path, opener):
         self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = opener(self.path)
         op.createmode = self.createmode
         self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
 
     def datafiles(self):
         for a, b, size in self._walk('data', True):
             try:
                 a = decodefilename(a)
             except KeyError:
                 a = None
             yield a, b, size
 
     def join(self, f):
         return self.path + '/' + encodefilename(f)
 
     def copylist(self):
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in _data.split()])
 
 class fncache(object):
     # the filename used to be partially encoded
     # hence the encodedir/decodedir dance
     def __init__(self, opener):
         self.opener = opener
         self.entries = None
         self._dirty = False
 
     def _load(self):
         '''fill the entries from the fncache file'''
         self.entries = set()
         self._dirty = False
         try:
             fp = self.opener('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
             return
         for n, line in enumerate(fp):
             if (len(line) < 2) or (line[-1] != '\n'):
                 t = _('invalid entry in fncache, line %s') % (n + 1)
                 raise util.Abort(t)
             self.entries.add(decodedir(line[:-1]))
         fp.close()
 
     def rewrite(self, files):
         fp = self.opener('fncache', mode='wb')
         for p in files:
             fp.write(encodedir(p) + '\n')
         fp.close()
         self.entries = set(files)
         self._dirty = False
 
     def write(self):
         if not self._dirty:
             return
         fp = self.opener('fncache', mode='wb', atomictemp=True)
         for p in self.entries:
             fp.write(encodedir(p) + '\n')
         fp.rename()
         self._dirty = False
 
     def add(self, fn):
         if self.entries is None:
             self._load()
         if fn not in self.entries:
             self._dirty = True
             self.entries.add(fn)
 
     def __contains__(self, fn):
         if self.entries is None:
             self._load()
         return fn in self.entries
 
     def __iter__(self):
         if self.entries is None:
             self._load()
         return iter(self.entries)
 
 class fncachestore(basicstore):
     def __init__(self, path, opener, encode):
         self.encode = encode
         self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = opener(self.path)
         op.createmode = self.createmode
         fnc = fncache(op)
         self.fncache = fnc
 
         def fncacheopener(path, mode='r', *args, **kw):
             if mode not in ('r', 'rb') and path.startswith('data/'):
                 fnc.add(path)
             return op(self.encode(path), mode, *args, **kw)
         self.opener = fncacheopener
 
     def join(self, f):
         return self.path + '/' + self.encode(f)
 
     def datafiles(self):
         rewrite = False
         existing = []
         spath = self.path
         for f in self.fncache:
             ef = self.encode(f)
             try:
                 st = os.stat(spath + '/' + ef)
                 yield f, ef, st.st_size
                 existing.append(f)
             except OSError:
                 # nonexistent entry
                 rewrite = True
         if rewrite:
             # rewrite fncache to remove nonexistent entries
             # (may be caused by rollback / strip)
             self.fncache.rewrite(existing)
 
     def copylist(self):
         d = ('data dh fncache'
              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
         return (['requires', '00changelog.i'] +
                 ['store/' + f for f in d.split()])
 
     def write(self):
         self.fncache.write()
 
 def store(requirements, path, opener):
     if 'store' in requirements:
         if 'fncache' in requirements:
             auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
             encode = lambda f: _hybridencode(f, auxencode)
             return fncachestore(path, opener, encode)
         return encodedstore(path, opener)
     return basicstore(path, opener)
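
The store() factory above shows how the fncachestore path encoder is composed from _auxencode and _hybridencode. The following is a minimal sketch, not part of the changeset, that builds the same composition with the 'dotencode' requirement enabled and prints the result for a short and a very long path; it assumes a Python 2 environment with this version of Mercurial importable, and the local names auxencode/hybridencode are illustrative only.

# Sketch only: mirror the encoder composition done in store() above.
from mercurial import store

auxencode = lambda f: store._auxencode(f, True)            # 'dotencode' enabled
hybridencode = lambda f: store._hybridencode(f, auxencode)

# short paths keep the reversible default encoding (uppercase -> '_x', etc.)
print hybridencode('data/AUX/Some.File.i')
# paths whose encoding exceeds MAX_PATH_LEN_IN_HGSTORE (120) switch to the
# non-reversible hashed 'dh/...' form described in _hybridencode's docstring
print hybridencode('data/' + 'a' * 200 + '.i')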
@@ -1,29 +1,32 @@
 # this is hack to make sure no escape characters are inserted into the output
 import os
 if 'TERM' in os.environ:
     del os.environ['TERM']
 import doctest
 
 import mercurial.changelog
 doctest.testmod(mercurial.changelog)
 
 import mercurial.dagparser
 doctest.testmod(mercurial.dagparser, optionflags=doctest.NORMALIZE_WHITESPACE)
 
 import mercurial.match
 doctest.testmod(mercurial.match)
 
+import mercurial.store
+doctest.testmod(mercurial.store)
+
 import mercurial.url
 doctest.testmod(mercurial.url)
 
 import mercurial.util
 doctest.testmod(mercurial.util)
 
 import mercurial.encoding
 doctest.testmod(mercurial.encoding)
 
 import mercurial.hgweb.hgwebdir_mod
 doctest.testmod(mercurial.hgweb.hgwebdir_mod)
 
 import hgext.convert.cvsps
 doctest.testmod(hgext.convert.cvsps)
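
The runner above relies on doctest printing any failing examples to stdout. For a quick standalone check of just the new store doctests, a sketch along the following lines also works; it is not part of the changeset and assumes mercurial.store is importable, using the (failed, attempted) counts that doctest.testmod() returns.

# Sketch only: run mercurial.store's doctests and report the counts.
import doctest
import mercurial.store

failed, attempted = doctest.testmod(mercurial.store, verbose=False)
print "mercurial.store doctests: %d run, %d failed" % (attempted, failed)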