store: remove pointless pathjoiner parameter...
Adrian Buehlmann
r13426:643b8212 default
--- a/mercurial/statichttprepo.py
+++ b/mercurial/statichttprepo.py
@@ -1,147 +1,145 @@
 # statichttprepo.py - simple http repository class for mercurial
 #
 # This provides read-only repo access to repositories exported via static http
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
 import changelog, byterange, url, error
 import localrepo, manifest, util, store
 import urllib, urllib2, errno
 
 class httprangereader(object):
     def __init__(self, url, opener):
         # we assume opener has HTTPRangeHandler
         self.url = url
         self.pos = 0
         self.opener = opener
         self.name = url
     def seek(self, pos):
         self.pos = pos
     def read(self, bytes=None):
         req = urllib2.Request(self.url)
         end = ''
         if bytes:
             end = self.pos + bytes - 1
             req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
 
         try:
             f = self.opener.open(req)
             data = f.read()
             if hasattr(f, 'getcode'):
                 # python 2.6+
                 code = f.getcode()
             elif hasattr(f, 'code'):
                 # undocumented attribute, seems to be set in 2.4 and 2.5
                 code = f.code
             else:
                 # Don't know how to check, hope for the best.
                 code = 206
         except urllib2.HTTPError, inst:
             num = inst.code == 404 and errno.ENOENT or None
             raise IOError(num, inst)
         except urllib2.URLError, inst:
             raise IOError(None, inst.reason[1])
 
         if code == 200:
             # HTTPRangeHandler does nothing if remote does not support
             # Range headers and returns the full entity. Let's slice it.
             if bytes:
                 data = data[self.pos:self.pos + bytes]
             else:
                 data = data[self.pos:]
         elif bytes:
             data = data[:bytes]
         self.pos += len(data)
         return data
     def __iter__(self):
         return iter(self.read().splitlines(1))
     def close(self):
         pass
 
 def build_opener(ui, authinfo):
     # urllib cannot handle URLs with embedded user or passwd
     urlopener = url.opener(ui, authinfo)
     urlopener.add_handler(byterange.HTTPRangeHandler())
 
     def opener(base):
         """return a function that opens files over http"""
         p = base
         def o(path, mode="r", atomictemp=None):
             if 'a' in mode or 'w' in mode:
                 raise IOError('Permission denied')
             f = "/".join((p, urllib.quote(path)))
             return httprangereader(f, urlopener)
         return o
 
     return opener
 
 class statichttprepository(localrepo.localrepository):
     def __init__(self, ui, path):
         self._url = path
         self.ui = ui
 
         self.root = path
         self.path, authinfo = url.getauthinfo(path.rstrip('/') + "/.hg")
 
         opener = build_opener(ui, authinfo)
         self.opener = opener(self.path)
 
         # find requirements
         try:
             requirements = self.opener("requires").read().splitlines()
         except IOError, inst:
             if inst.errno != errno.ENOENT:
                 raise
             # check if it is a non-empty old-style repository
             try:
                 fp = self.opener("00changelog.i")
                 fp.read(1)
                 fp.close()
             except IOError, inst:
                 if inst.errno != errno.ENOENT:
                     raise
                 # we do not care about empty old-style repositories here
                 msg = _("'%s' does not appear to be an hg repository") % path
                 raise error.RepoError(msg)
             requirements = []
 
         # check them
         for r in requirements:
             if r not in self.supported:
                 raise error.RepoError(_("requirement '%s' not supported") % r)
 
         # setup store
-        def pjoin(a, b):
-            return a + '/' + b
-        self.store = store.store(requirements, self.path, opener, pjoin)
+        self.store = store.store(requirements, self.path, opener)
         self.spath = self.store.path
         self.sopener = self.store.opener
         self.sjoin = self.store.join
 
         self.manifest = manifest.manifest(self.sopener)
         self.changelog = changelog.changelog(self.sopener)
         self._tags = None
         self.nodetagscache = None
         self._branchcache = None
         self._branchcachetip = None
         self.encodepats = None
         self.decodepats = None
         self.capabilities.remove("pushkey")
 
     def url(self):
         return self._url
 
     def local(self):
         return False
 
     def lock(self, wait=True):
         raise util.Abort(_('cannot lock static-http repository'))
 
 def instance(ui, path, create):
     if create:
         raise util.Abort(_('cannot create new static-http repository'))
     return statichttprepository(ui, path[7:])
--- a/mercurial/store.py
+++ b/mercurial/store.py
@@ -1,357 +1,354 @@
 # store.py - repository store handling for Mercurial
 #
 # Copyright 2008 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from i18n import _
 import osutil, util
 import os, stat
 
 _sha = util.sha1
 
 # This avoids a collision between a file named foo and a dir named
 # foo.i or foo.d
 def encodedir(path):
     if not path.startswith('data/'):
         return path
     return (path
             .replace(".hg/", ".hg.hg/")
             .replace(".i/", ".i.hg/")
             .replace(".d/", ".d.hg/"))
 
 def decodedir(path):
     if not path.startswith('data/') or ".hg/" not in path:
         return path
     return (path
             .replace(".d.hg/", ".d/")
             .replace(".i.hg/", ".i/")
             .replace(".hg.hg/", ".hg/"))
 
 def _buildencodefun():
     e = '_'
     win_reserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + win_reserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
         cmap[chr(x)] = e + chr(x).lower()
     dmap = {}
     for k, v in cmap.iteritems():
         dmap[v] = k
     def decode(s):
         i = 0
         while i < len(s):
             for l in xrange(1, 4):
                 try:
                     yield dmap[s[i:i + l]]
                     i += l
                     break
                 except KeyError:
                     pass
             else:
                 raise KeyError
     return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
             lambda s: decodedir("".join(list(decode(s)))))
 
 encodefilename, decodefilename = _buildencodefun()
 
 def _build_lower_encodefun():
     win_reserved = [ord(x) for x in '\\:*?"<>|']
     cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
     for x in (range(32) + range(126, 256) + win_reserved):
         cmap[chr(x)] = "~%02x" % x
     for x in range(ord("A"), ord("Z")+1):
         cmap[chr(x)] = chr(x).lower()
     return lambda s: "".join([cmap[c] for c in s])
 
 lowerencode = _build_lower_encodefun()
 
 _windows_reserved_filenames = '''con prn aux nul
     com1 com2 com3 com4 com5 com6 com7 com8 com9
     lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
 def _auxencode(path, dotencode):
     res = []
     for n in path.split('/'):
         if n:
             base = n.split('.')[0]
             if base and (base in _windows_reserved_filenames):
                 # encode third letter ('aux' -> 'au~78')
                 ec = "~%02x" % ord(n[2])
                 n = n[0:2] + ec + n[3:]
             if n[-1] in '. ':
                 # encode last period or space ('foo...' -> 'foo..~2e')
                 n = n[:-1] + "~%02x" % ord(n[-1])
             if dotencode and n[0] in '. ':
                 n = "~%02x" % ord(n[0]) + n[1:]
         res.append(n)
     return '/'.join(res)
 
 MAX_PATH_LEN_IN_HGSTORE = 120
 DIR_PREFIX_LEN = 8
 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
 def _hybridencode(path, auxencode):
     '''encodes path with a length limit
 
     Encodes all paths that begin with 'data/', according to the following.
 
     Default encoding (reversible):
 
     Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
     characters are encoded as '~xx', where xx is the two digit hex code
     of the character (see encodefilename).
     Relevant path components consisting of Windows reserved filenames are
     masked by encoding the third character ('aux' -> 'au~78', see auxencode).
 
     Hashed encoding (not reversible):
 
     If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
     non-reversible hybrid hashing of the path is done instead.
     This encoding uses up to DIR_PREFIX_LEN characters of all directory
     levels of the lowerencoded path, but not more levels than can fit into
     _MAX_SHORTENED_DIRS_LEN.
     Then follows the filler followed by the sha digest of the full path.
     The filler is the beginning of the basename of the lowerencoded path
     (the basename is everything after the last path separator). The filler
     is as long as possible, filling in characters from the basename until
     the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
     of the basename have been taken).
     The extension (e.g. '.i' or '.d') is preserved.
 
     The string 'data/' at the beginning is replaced with 'dh/', if the hashed
     encoding was used.
     '''
     if not path.startswith('data/'):
         return path
     # escape directories ending with .i and .d
     path = encodedir(path)
     ndpath = path[len('data/'):]
     res = 'data/' + auxencode(encodefilename(ndpath))
     if len(res) > MAX_PATH_LEN_IN_HGSTORE:
         digest = _sha(path).hexdigest()
         aep = auxencode(lowerencode(ndpath))
         _root, ext = os.path.splitext(aep)
         parts = aep.split('/')
         basename = parts[-1]
         sdirs = []
         for p in parts[:-1]:
             d = p[:DIR_PREFIX_LEN]
             if d[-1] in '. ':
                 # Windows can't access dirs ending in period or space
                 d = d[:-1] + '_'
             t = '/'.join(sdirs) + '/' + d
             if len(t) > _MAX_SHORTENED_DIRS_LEN:
                 break
             sdirs.append(d)
         dirs = '/'.join(sdirs)
         if len(dirs) > 0:
             dirs += '/'
         res = 'dh/' + dirs + digest + ext
         space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
         if space_left > 0:
             filler = basename[:space_left]
             res = 'dh/' + dirs + filler + digest + ext
     return res
 
 def _calcmode(path):
     try:
         # files in .hg/ will be created using this mode
         mode = os.stat(path).st_mode
         # avoid some useless chmods
         if (0777 & ~util.umask) == (0777 & mode):
             mode = None
     except OSError:
         mode = None
     return mode
 
 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
 
 class basicstore(object):
     '''base class for local repository stores'''
-    def __init__(self, path, opener, pathjoiner):
-        self.pathjoiner = pathjoiner
+    def __init__(self, path, opener):
         self.path = path
         self.createmode = _calcmode(path)
         op = opener(self.path)
         op.createmode = self.createmode
         self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw)
 
     def join(self, f):
-        return self.pathjoiner(self.path, encodedir(f))
+        return self.path + '/' + encodedir(f)
 
     def _walk(self, relpath, recurse):
         '''yields (unencoded, encoded, size)'''
-        path = self.pathjoiner(self.path, relpath)
-        striplen = len(self.path) + len(os.sep)
+        path = self.path
+        if relpath:
+            path += '/' + relpath
+        striplen = len(self.path) + 1
         l = []
         if os.path.isdir(path):
             visit = [path]
             while visit:
                 p = visit.pop()
                 for f, kind, st in osutil.listdir(p, stat=True):
-                    fp = self.pathjoiner(p, f)
+                    fp = p + '/' + f
                     if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                         n = util.pconvert(fp[striplen:])
                         l.append((decodedir(n), n, st.st_size))
                     elif kind == stat.S_IFDIR and recurse:
                         visit.append(fp)
         return sorted(l)
 
     def datafiles(self):
         return self._walk('data', True)
 
     def walk(self):
         '''yields (unencoded, encoded, size)'''
         # yield data files first
         for x in self.datafiles():
             yield x
         # yield manifest before changelog
         for x in reversed(self._walk('', False)):
             yield x
 
     def copylist(self):
         return ['requires'] + _data.split()
 
     def write(self):
         pass
 
 class encodedstore(basicstore):
-    def __init__(self, path, opener, pathjoiner):
-        self.pathjoiner = pathjoiner
-        self.path = self.pathjoiner(path, 'store')
+    def __init__(self, path, opener):
+        self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = opener(self.path)
         op.createmode = self.createmode
         self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
 
     def datafiles(self):
         for a, b, size in self._walk('data', True):
             try:
                 a = decodefilename(a)
             except KeyError:
                 a = None
             yield a, b, size
 
     def join(self, f):
-        return self.pathjoiner(self.path, encodefilename(f))
+        return self.path + '/' + encodefilename(f)
 
     def copylist(self):
         return (['requires', '00changelog.i'] +
-                [self.pathjoiner('store', f) for f in _data.split()])
+                ['store/' + f for f in _data.split()])
 
 class fncache(object):
     # the filename used to be partially encoded
     # hence the encodedir/decodedir dance
     def __init__(self, opener):
         self.opener = opener
         self.entries = None
         self._dirty = False
 
     def _load(self):
         '''fill the entries from the fncache file'''
         self.entries = set()
         self._dirty = False
         try:
             fp = self.opener('fncache', mode='rb')
         except IOError:
             # skip nonexistent file
             return
         for n, line in enumerate(fp):
             if (len(line) < 2) or (line[-1] != '\n'):
                 t = _('invalid entry in fncache, line %s') % (n + 1)
                 raise util.Abort(t)
             self.entries.add(decodedir(line[:-1]))
         fp.close()
 
     def rewrite(self, files):
         fp = self.opener('fncache', mode='wb')
         for p in files:
             fp.write(encodedir(p) + '\n')
         fp.close()
         self.entries = set(files)
         self._dirty = False
 
     def write(self):
         if not self._dirty:
             return
         fp = self.opener('fncache', mode='wb', atomictemp=True)
         for p in self.entries:
             fp.write(encodedir(p) + '\n')
         fp.rename()
         self._dirty = False
 
     def add(self, fn):
         if self.entries is None:
             self._load()
         if fn not in self.entries:
             self._dirty = True
             self.entries.add(fn)
 
     def __contains__(self, fn):
         if self.entries is None:
             self._load()
         return fn in self.entries
 
     def __iter__(self):
         if self.entries is None:
             self._load()
         return iter(self.entries)
 
 class fncachestore(basicstore):
-    def __init__(self, path, opener, pathjoiner, encode):
+    def __init__(self, path, opener, encode):
         self.encode = encode
-        self.pathjoiner = pathjoiner
-        self.path = self.pathjoiner(path, 'store')
+        self.path = path + '/store'
         self.createmode = _calcmode(self.path)
         op = opener(self.path)
         op.createmode = self.createmode
         fnc = fncache(op)
         self.fncache = fnc
 
         def fncacheopener(path, mode='r', *args, **kw):
             if mode not in ('r', 'rb') and path.startswith('data/'):
                 fnc.add(path)
             return op(self.encode(path), mode, *args, **kw)
         self.opener = fncacheopener
 
     def join(self, f):
-        return self.pathjoiner(self.path, self.encode(f))
+        return self.path + '/' + self.encode(f)
 
     def datafiles(self):
         rewrite = False
         existing = []
-        pjoin = self.pathjoiner
         spath = self.path
         for f in self.fncache:
             ef = self.encode(f)
             try:
-                st = os.stat(pjoin(spath, ef))
+                st = os.stat(spath + '/' + ef)
                 yield f, ef, st.st_size
                 existing.append(f)
             except OSError:
                 # nonexistent entry
                 rewrite = True
         if rewrite:
             # rewrite fncache to remove nonexistent entries
             # (may be caused by rollback / strip)
             self.fncache.rewrite(existing)
 
     def copylist(self):
         d = ('data dh fncache'
              ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
         return (['requires', '00changelog.i'] +
-                [self.pathjoiner('store', f) for f in d.split()])
+                ['store/' + f for f in d.split()])
 
     def write(self):
         self.fncache.write()
 
-def store(requirements, path, opener, pathjoiner=None):
-    pathjoiner = pathjoiner or os.path.join
+def store(requirements, path, opener):
     if 'store' in requirements:
         if 'fncache' in requirements:
             auxencode = lambda f: _auxencode(f, 'dotencode' in requirements)
             encode = lambda f: _hybridencode(f, auxencode)
-            return fncachestore(path, opener, pathjoiner, encode)
-        return encodedstore(path, opener, pathjoiner)
-    return basicstore(path, opener, pathjoiner)
+            return fncachestore(path, opener, encode)
+        return encodedstore(path, opener)
+    return basicstore(path, opener)
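
For illustration only, a minimal sketch of the call-site change: the store.store() signatures are taken from the diff above, while the requirements list, the repository path, and the use of util.opener as the opener factory (as localrepo of this Mercurial era did) are assumed example values, not part of this commit.

    import os
    from mercurial import store, util

    # hypothetical example inputs; only the store.store() signatures come from the diff
    requirements = ['revlogv1', 'store', 'fncache']
    path = os.path.expanduser('~/repos/example/.hg')

    # old call site: callers had to hand in a path-joining function
    #   s = store.store(requirements, path, util.opener, os.path.join)

    # new call site: store.py joins paths itself with '/'
    s = store.store(requirements, path, util.opener)
    print s.join('00changelog.i')   # -> <path>/store/00changelog.i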