scmutil: abstract out mustaudit delegation
Bryan O'Sullivan
r17845:408ded42 stable
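The changeset adds an auditvfs wrapper (see the scmutil.py hunk below) whose only job is to forward the mustaudit flag to the vfs object it wraps, so vfs wrappers no longer have to re-declare the getter/setter pair themselves. A minimal self-contained sketch of that delegation pattern follows; basevfs is a hypothetical stand-in for the real scmutil.vfs and is not part of the changeset.

class basevfs(object):
    '''Hypothetical stand-in for scmutil.vfs: owns the real audit flag.'''

    def __init__(self):
        self._audit = True

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)

class auditvfs(object):
    '''Same shape as the wrapper added below: delegate to self.vfs.'''

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)

inner = basevfs()
wrapper = auditvfs(inner)
wrapper.mustaudit = False      # the assignment lands on the wrapped vfs
assert inner.mustaudit is False

The point of the abstraction is that wrapper classes can reuse this delegation instead of each re-implementing the mustaudit property on their own.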
@@ -1,950 +1,962 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding, phases
9 import util, error, osutil, revset, similar, encoding, phases
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, repo, excluded=None):
13 def nochangesfound(ui, repo, excluded=None):
14 '''Report no changes for push/pull, excluded is None or a list of
14 '''Report no changes for push/pull, excluded is None or a list of
15 nodes excluded from the push/pull.
15 nodes excluded from the push/pull.
16 '''
16 '''
17 secretlist = []
17 secretlist = []
18 if excluded:
18 if excluded:
19 for n in excluded:
19 for n in excluded:
20 ctx = repo[n]
20 ctx = repo[n]
21 if ctx.phase() >= phases.secret and not ctx.extinct():
21 if ctx.phase() >= phases.secret and not ctx.extinct():
22 secretlist.append(n)
22 secretlist.append(n)
23
23
24 if secretlist:
24 if secretlist:
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
26 % len(secretlist))
26 % len(secretlist))
27 else:
27 else:
28 ui.status(_("no changes found\n"))
28 ui.status(_("no changes found\n"))
29
29
30 def checknewlabel(repo, lbl, kind):
30 def checknewlabel(repo, lbl, kind):
31 if lbl in ['tip', '.', 'null']:
31 if lbl in ['tip', '.', 'null']:
32 raise util.Abort(_("the name '%s' is reserved") % lbl)
32 raise util.Abort(_("the name '%s' is reserved") % lbl)
33 for c in (':', '\0', '\n', '\r'):
33 for c in (':', '\0', '\n', '\r'):
34 if c in lbl:
34 if c in lbl:
35 raise util.Abort(_("%r cannot be used in a %s name") %
35 raise util.Abort(_("%r cannot be used in a %s name") %
36 (c, kind))
36 (c, kind))
37
37
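# Illustration only (not part of the changeset): repo is never consulted by
# the checks above, so None is enough to demonstrate the reserved-name case.
try:
    checknewlabel(None, 'tip', 'bookmark')
except util.Abort:
    pass   # 'tip' is one of the reserved names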
38 def checkfilename(f):
38 def checkfilename(f):
39 '''Check that the filename f is an acceptable filename for a tracked file'''
39 '''Check that the filename f is an acceptable filename for a tracked file'''
40 if '\r' in f or '\n' in f:
40 if '\r' in f or '\n' in f:
41 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
41 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
42
42
43 def checkportable(ui, f):
43 def checkportable(ui, f):
44 '''Check if filename f is portable and warn or abort depending on config'''
44 '''Check if filename f is portable and warn or abort depending on config'''
45 checkfilename(f)
45 checkfilename(f)
46 abort, warn = checkportabilityalert(ui)
46 abort, warn = checkportabilityalert(ui)
47 if abort or warn:
47 if abort or warn:
48 msg = util.checkwinfilename(f)
48 msg = util.checkwinfilename(f)
49 if msg:
49 if msg:
50 msg = "%s: %r" % (msg, f)
50 msg = "%s: %r" % (msg, f)
51 if abort:
51 if abort:
52 raise util.Abort(msg)
52 raise util.Abort(msg)
53 ui.warn(_("warning: %s\n") % msg)
53 ui.warn(_("warning: %s\n") % msg)
54
54
55 def checkportabilityalert(ui):
55 def checkportabilityalert(ui):
56 '''check if the user's config requests nothing, a warning, or abort for
56 '''check if the user's config requests nothing, a warning, or abort for
57 non-portable filenames'''
57 non-portable filenames'''
58 val = ui.config('ui', 'portablefilenames', 'warn')
58 val = ui.config('ui', 'portablefilenames', 'warn')
59 lval = val.lower()
59 lval = val.lower()
60 bval = util.parsebool(val)
60 bval = util.parsebool(val)
61 abort = os.name == 'nt' or lval == 'abort'
61 abort = os.name == 'nt' or lval == 'abort'
62 warn = bval or lval == 'warn'
62 warn = bval or lval == 'warn'
63 if bval is None and not (warn or abort or lval == 'ignore'):
63 if bval is None and not (warn or abort or lval == 'ignore'):
64 raise error.ConfigError(
64 raise error.ConfigError(
65 _("ui.portablefilenames value is invalid ('%s')") % val)
65 _("ui.portablefilenames value is invalid ('%s')") % val)
66 return abort, warn
66 return abort, warn
67
67
68 class casecollisionauditor(object):
68 class casecollisionauditor(object):
69 def __init__(self, ui, abort, dirstate):
69 def __init__(self, ui, abort, dirstate):
70 self._ui = ui
70 self._ui = ui
71 self._abort = abort
71 self._abort = abort
72 allfiles = '\0'.join(dirstate._map)
72 allfiles = '\0'.join(dirstate._map)
73 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
73 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
74 self._dirstate = dirstate
74 self._dirstate = dirstate
75 # The purpose of _newfiles is so that we don't complain about
75 # The purpose of _newfiles is so that we don't complain about
76 # case collisions if someone were to call this object with the
76 # case collisions if someone were to call this object with the
77 # same filename twice.
77 # same filename twice.
78 self._newfiles = set()
78 self._newfiles = set()
79
79
80 def __call__(self, f):
80 def __call__(self, f):
81 fl = encoding.lower(f)
81 fl = encoding.lower(f)
82 if (fl in self._loweredfiles and f not in self._dirstate and
82 if (fl in self._loweredfiles and f not in self._dirstate and
83 f not in self._newfiles):
83 f not in self._newfiles):
84 msg = _('possible case-folding collision for %s') % f
84 msg = _('possible case-folding collision for %s') % f
85 if self._abort:
85 if self._abort:
86 raise util.Abort(msg)
86 raise util.Abort(msg)
87 self._ui.warn(_("warning: %s\n") % msg)
87 self._ui.warn(_("warning: %s\n") % msg)
88 self._loweredfiles.add(fl)
88 self._loweredfiles.add(fl)
89 self._newfiles.add(f)
89 self._newfiles.add(f)
90
90
91 class pathauditor(object):
91 class pathauditor(object):
92 '''ensure that a filesystem path contains no banned components.
92 '''ensure that a filesystem path contains no banned components.
93 the following properties of a path are checked:
93 the following properties of a path are checked:
94
94
95 - ends with a directory separator
95 - ends with a directory separator
96 - under top-level .hg
96 - under top-level .hg
97 - starts at the root of a windows drive
97 - starts at the root of a windows drive
98 - contains ".."
98 - contains ".."
99 - traverses a symlink (e.g. a/symlink_here/b)
99 - traverses a symlink (e.g. a/symlink_here/b)
100 - inside a nested repository (a callback can be used to approve
100 - inside a nested repository (a callback can be used to approve
101 some nested repositories, e.g., subrepositories)
101 some nested repositories, e.g., subrepositories)
102 '''
102 '''
103
103
104 def __init__(self, root, callback=None):
104 def __init__(self, root, callback=None):
105 self.audited = set()
105 self.audited = set()
106 self.auditeddir = set()
106 self.auditeddir = set()
107 self.root = root
107 self.root = root
108 self.callback = callback
108 self.callback = callback
109 if os.path.lexists(root) and not util.checkcase(root):
109 if os.path.lexists(root) and not util.checkcase(root):
110 self.normcase = util.normcase
110 self.normcase = util.normcase
111 else:
111 else:
112 self.normcase = lambda x: x
112 self.normcase = lambda x: x
113
113
114 def __call__(self, path):
114 def __call__(self, path):
115 '''Check the relative path.
115 '''Check the relative path.
116 path may contain a pattern (e.g. foodir/**.txt)'''
116 path may contain a pattern (e.g. foodir/**.txt)'''
117
117
118 path = util.localpath(path)
118 path = util.localpath(path)
119 normpath = self.normcase(path)
119 normpath = self.normcase(path)
120 if normpath in self.audited:
120 if normpath in self.audited:
121 return
121 return
122 # AIX ignores "/" at end of path, others raise EISDIR.
122 # AIX ignores "/" at end of path, others raise EISDIR.
123 if util.endswithsep(path):
123 if util.endswithsep(path):
124 raise util.Abort(_("path ends in directory separator: %s") % path)
124 raise util.Abort(_("path ends in directory separator: %s") % path)
125 parts = util.splitpath(path)
125 parts = util.splitpath(path)
126 if (os.path.splitdrive(path)[0]
126 if (os.path.splitdrive(path)[0]
127 or parts[0].lower() in ('.hg', '.hg.', '')
127 or parts[0].lower() in ('.hg', '.hg.', '')
128 or os.pardir in parts):
128 or os.pardir in parts):
129 raise util.Abort(_("path contains illegal component: %s") % path)
129 raise util.Abort(_("path contains illegal component: %s") % path)
130 if '.hg' in path.lower():
130 if '.hg' in path.lower():
131 lparts = [p.lower() for p in parts]
131 lparts = [p.lower() for p in parts]
132 for p in '.hg', '.hg.':
132 for p in '.hg', '.hg.':
133 if p in lparts[1:]:
133 if p in lparts[1:]:
134 pos = lparts.index(p)
134 pos = lparts.index(p)
135 base = os.path.join(*parts[:pos])
135 base = os.path.join(*parts[:pos])
136 raise util.Abort(_("path '%s' is inside nested repo %r")
136 raise util.Abort(_("path '%s' is inside nested repo %r")
137 % (path, base))
137 % (path, base))
138
138
139 normparts = util.splitpath(normpath)
139 normparts = util.splitpath(normpath)
140 assert len(parts) == len(normparts)
140 assert len(parts) == len(normparts)
141
141
142 parts.pop()
142 parts.pop()
143 normparts.pop()
143 normparts.pop()
144 prefixes = []
144 prefixes = []
145 while parts:
145 while parts:
146 prefix = os.sep.join(parts)
146 prefix = os.sep.join(parts)
147 normprefix = os.sep.join(normparts)
147 normprefix = os.sep.join(normparts)
148 if normprefix in self.auditeddir:
148 if normprefix in self.auditeddir:
149 break
149 break
150 curpath = os.path.join(self.root, prefix)
150 curpath = os.path.join(self.root, prefix)
151 try:
151 try:
152 st = os.lstat(curpath)
152 st = os.lstat(curpath)
153 except OSError, err:
153 except OSError, err:
154 # EINVAL can be raised as invalid path syntax under win32.
154 # EINVAL can be raised as invalid path syntax under win32.
155 # They must be ignored for patterns can be checked too.
155 # They must be ignored for patterns can be checked too.
156 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
156 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
157 raise
157 raise
158 else:
158 else:
159 if stat.S_ISLNK(st.st_mode):
159 if stat.S_ISLNK(st.st_mode):
160 raise util.Abort(
160 raise util.Abort(
161 _('path %r traverses symbolic link %r')
161 _('path %r traverses symbolic link %r')
162 % (path, prefix))
162 % (path, prefix))
163 elif (stat.S_ISDIR(st.st_mode) and
163 elif (stat.S_ISDIR(st.st_mode) and
164 os.path.isdir(os.path.join(curpath, '.hg'))):
164 os.path.isdir(os.path.join(curpath, '.hg'))):
165 if not self.callback or not self.callback(curpath):
165 if not self.callback or not self.callback(curpath):
166 raise util.Abort(_("path '%s' is inside nested "
166 raise util.Abort(_("path '%s' is inside nested "
167 "repo %r")
167 "repo %r")
168 % (path, prefix))
168 % (path, prefix))
169 prefixes.append(normprefix)
169 prefixes.append(normprefix)
170 parts.pop()
170 parts.pop()
171 normparts.pop()
171 normparts.pop()
172
172
173 self.audited.add(normpath)
173 self.audited.add(normpath)
174 # only add prefixes to the cache after checking everything: we don't
174 # only add prefixes to the cache after checking everything: we don't
175 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
175 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
176 self.auditeddir.update(prefixes)
176 self.auditeddir.update(prefixes)
177
177
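# Illustration only (not part of the changeset): a usage sketch of the
# auditor above. The '/tmp/repo' root is hypothetical, and the audited
# paths do not need to exist for these particular checks.
auditor = pathauditor('/tmp/repo')
auditor('src/module.py')         # stays inside the root: accepted
try:
    auditor('../escape.txt')     # contains '..': rejected
except util.Abort:
    pass
try:
    auditor('.hg/hgrc')          # under the top-level .hg: rejected
except util.Abort:
    pass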
178 class abstractvfs(object):
178 class abstractvfs(object):
179 """Abstract base class; cannot be instantiated"""
179 """Abstract base class; cannot be instantiated"""
180
180
181 def __init__(self, *args, **kwargs):
181 def __init__(self, *args, **kwargs):
182 '''Prevent instantiation; don't call this from subclasses.'''
182 '''Prevent instantiation; don't call this from subclasses.'''
183 raise NotImplementedError('attempted instantiating ' + str(type(self)))
183 raise NotImplementedError('attempted instantiating ' + str(type(self)))
184
184
185 def tryread(self, path):
185 def tryread(self, path):
186 '''gracefully return an empty string for missing files'''
186 '''gracefully return an empty string for missing files'''
187 try:
187 try:
188 return self.read(path)
188 return self.read(path)
189 except IOError, inst:
189 except IOError, inst:
190 if inst.errno != errno.ENOENT:
190 if inst.errno != errno.ENOENT:
191 raise
191 raise
192 return ""
192 return ""
193
193
194 def read(self, path):
194 def read(self, path):
195 fp = self(path, 'rb')
195 fp = self(path, 'rb')
196 try:
196 try:
197 return fp.read()
197 return fp.read()
198 finally:
198 finally:
199 fp.close()
199 fp.close()
200
200
201 def write(self, path, data):
201 def write(self, path, data):
202 fp = self(path, 'wb')
202 fp = self(path, 'wb')
203 try:
203 try:
204 return fp.write(data)
204 return fp.write(data)
205 finally:
205 finally:
206 fp.close()
206 fp.close()
207
207
208 def append(self, path, data):
208 def append(self, path, data):
209 fp = self(path, 'ab')
209 fp = self(path, 'ab')
210 try:
210 try:
211 return fp.write(data)
211 return fp.write(data)
212 finally:
212 finally:
213 fp.close()
213 fp.close()
214
214
215 def exists(self, path=None):
215 def exists(self, path=None):
216 return os.path.exists(self.join(path))
216 return os.path.exists(self.join(path))
217
217
218 def isdir(self, path=None):
218 def isdir(self, path=None):
219 return os.path.isdir(self.join(path))
219 return os.path.isdir(self.join(path))
220
220
221 def makedir(self, path=None, notindexed=True):
221 def makedir(self, path=None, notindexed=True):
222 return util.makedir(self.join(path), notindexed)
222 return util.makedir(self.join(path), notindexed)
223
223
224 def makedirs(self, path=None, mode=None):
224 def makedirs(self, path=None, mode=None):
225 return util.makedirs(self.join(path), mode)
225 return util.makedirs(self.join(path), mode)
226
226
227 def mkdir(self, path=None):
227 def mkdir(self, path=None):
228 return os.mkdir(self.join(path))
228 return os.mkdir(self.join(path))
229
229
230 def readdir(self, path=None, stat=None, skip=None):
230 def readdir(self, path=None, stat=None, skip=None):
231 return osutil.listdir(self.join(path), stat, skip)
231 return osutil.listdir(self.join(path), stat, skip)
232
232
233 def stat(self, path=None):
233 def stat(self, path=None):
234 return os.stat(self.join(path))
234 return os.stat(self.join(path))
235
235
236 class vfs(abstractvfs):
236 class vfs(abstractvfs):
237 '''Operate files relative to a base directory
237 '''Operate files relative to a base directory
238
238
239 This class is used to hide the details of COW semantics and
239 This class is used to hide the details of COW semantics and
240 remote file access from higher level code.
240 remote file access from higher level code.
241 '''
241 '''
242 def __init__(self, base, audit=True, expand=False):
242 def __init__(self, base, audit=True, expand=False):
243 if expand:
243 if expand:
244 base = os.path.realpath(util.expandpath(base))
244 base = os.path.realpath(util.expandpath(base))
245 self.base = base
245 self.base = base
246 self._setmustaudit(audit)
246 self._setmustaudit(audit)
247 self.createmode = None
247 self.createmode = None
248 self._trustnlink = None
248 self._trustnlink = None
249
249
250 def _getmustaudit(self):
250 def _getmustaudit(self):
251 return self._audit
251 return self._audit
252
252
253 def _setmustaudit(self, onoff):
253 def _setmustaudit(self, onoff):
254 self._audit = onoff
254 self._audit = onoff
255 if onoff:
255 if onoff:
256 self.auditor = pathauditor(self.base)
256 self.auditor = pathauditor(self.base)
257 else:
257 else:
258 self.auditor = util.always
258 self.auditor = util.always
259
259
260 mustaudit = property(_getmustaudit, _setmustaudit)
260 mustaudit = property(_getmustaudit, _setmustaudit)
261
261
262 @util.propertycache
262 @util.propertycache
263 def _cansymlink(self):
263 def _cansymlink(self):
264 return util.checklink(self.base)
264 return util.checklink(self.base)
265
265
266 def _fixfilemode(self, name):
266 def _fixfilemode(self, name):
267 if self.createmode is None:
267 if self.createmode is None:
268 return
268 return
269 os.chmod(name, self.createmode & 0666)
269 os.chmod(name, self.createmode & 0666)
270
270
271 def __call__(self, path, mode="r", text=False, atomictemp=False):
271 def __call__(self, path, mode="r", text=False, atomictemp=False):
272 if self._audit:
272 if self._audit:
273 r = util.checkosfilename(path)
273 r = util.checkosfilename(path)
274 if r:
274 if r:
275 raise util.Abort("%s: %r" % (r, path))
275 raise util.Abort("%s: %r" % (r, path))
276 self.auditor(path)
276 self.auditor(path)
277 f = self.join(path)
277 f = self.join(path)
278
278
279 if not text and "b" not in mode:
279 if not text and "b" not in mode:
280 mode += "b" # for that other OS
280 mode += "b" # for that other OS
281
281
282 nlink = -1
282 nlink = -1
283 dirname, basename = util.split(f)
283 dirname, basename = util.split(f)
284 # If basename is empty, then the path is malformed because it points
284 # If basename is empty, then the path is malformed because it points
285 # to a directory. Let the posixfile() call below raise IOError.
285 # to a directory. Let the posixfile() call below raise IOError.
286 if basename and mode not in ('r', 'rb'):
286 if basename and mode not in ('r', 'rb'):
287 if atomictemp:
287 if atomictemp:
288 if not os.path.isdir(dirname):
288 if not os.path.isdir(dirname):
289 util.makedirs(dirname, self.createmode)
289 util.makedirs(dirname, self.createmode)
290 return util.atomictempfile(f, mode, self.createmode)
290 return util.atomictempfile(f, mode, self.createmode)
291 try:
291 try:
292 if 'w' in mode:
292 if 'w' in mode:
293 util.unlink(f)
293 util.unlink(f)
294 nlink = 0
294 nlink = 0
295 else:
295 else:
296 # nlinks() may behave differently for files on Windows
296 # nlinks() may behave differently for files on Windows
297 # shares if the file is open.
297 # shares if the file is open.
298 fd = util.posixfile(f)
298 fd = util.posixfile(f)
299 nlink = util.nlinks(f)
299 nlink = util.nlinks(f)
300 if nlink < 1:
300 if nlink < 1:
301 nlink = 2 # force mktempcopy (issue1922)
301 nlink = 2 # force mktempcopy (issue1922)
302 fd.close()
302 fd.close()
303 except (OSError, IOError), e:
303 except (OSError, IOError), e:
304 if e.errno != errno.ENOENT:
304 if e.errno != errno.ENOENT:
305 raise
305 raise
306 nlink = 0
306 nlink = 0
307 if not os.path.isdir(dirname):
307 if not os.path.isdir(dirname):
308 util.makedirs(dirname, self.createmode)
308 util.makedirs(dirname, self.createmode)
309 if nlink > 0:
309 if nlink > 0:
310 if self._trustnlink is None:
310 if self._trustnlink is None:
311 self._trustnlink = nlink > 1 or util.checknlink(f)
311 self._trustnlink = nlink > 1 or util.checknlink(f)
312 if nlink > 1 or not self._trustnlink:
312 if nlink > 1 or not self._trustnlink:
313 util.rename(util.mktempcopy(f), f)
313 util.rename(util.mktempcopy(f), f)
314 fp = util.posixfile(f, mode)
314 fp = util.posixfile(f, mode)
315 if nlink == 0:
315 if nlink == 0:
316 self._fixfilemode(f)
316 self._fixfilemode(f)
317 return fp
317 return fp
318
318
319 def symlink(self, src, dst):
319 def symlink(self, src, dst):
320 self.auditor(dst)
320 self.auditor(dst)
321 linkname = self.join(dst)
321 linkname = self.join(dst)
322 try:
322 try:
323 os.unlink(linkname)
323 os.unlink(linkname)
324 except OSError:
324 except OSError:
325 pass
325 pass
326
326
327 dirname = os.path.dirname(linkname)
327 dirname = os.path.dirname(linkname)
328 if not os.path.exists(dirname):
328 if not os.path.exists(dirname):
329 util.makedirs(dirname, self.createmode)
329 util.makedirs(dirname, self.createmode)
330
330
331 if self._cansymlink:
331 if self._cansymlink:
332 try:
332 try:
333 os.symlink(src, linkname)
333 os.symlink(src, linkname)
334 except OSError, err:
334 except OSError, err:
335 raise OSError(err.errno, _('could not symlink to %r: %s') %
335 raise OSError(err.errno, _('could not symlink to %r: %s') %
336 (src, err.strerror), linkname)
336 (src, err.strerror), linkname)
337 else:
337 else:
338 self.write(dst, src)
338 self.write(dst, src)
339
339
340 def audit(self, path):
340 def audit(self, path):
341 self.auditor(path)
341 self.auditor(path)
342
342
343 def join(self, path):
343 def join(self, path):
344 if path:
344 if path:
345 return os.path.join(self.base, path)
345 return os.path.join(self.base, path)
346 else:
346 else:
347 return self.base
347 return self.base
348
348
349 opener = vfs
349 opener = vfs
350
350
351 class auditvfs(object):
352 def __init__(self, vfs):
353 self.vfs = vfs
354
355 def _getmustaudit(self):
356 return self.vfs.mustaudit
357
358 def _setmustaudit(self, onoff):
359 self.vfs.mustaudit = onoff
360
361 mustaudit = property(_getmustaudit, _setmustaudit)
362
351 class filtervfs(abstractvfs):
363 class filtervfs(abstractvfs):
352 '''Wrapper vfs for filtering filenames with a function.'''
364 '''Wrapper vfs for filtering filenames with a function.'''
353
365
354 def __init__(self, opener, filter):
366 def __init__(self, opener, filter):
355 self._filter = filter
367 self._filter = filter
356 self._orig = opener
368 self._orig = opener
357
369
358 def __call__(self, path, *args, **kwargs):
370 def __call__(self, path, *args, **kwargs):
359 return self._orig(self._filter(path), *args, **kwargs)
371 return self._orig(self._filter(path), *args, **kwargs)
360
372
361 def join(self, path):
373 def join(self, path):
362 if path:
374 if path:
363 return self._orig.join(self._filter(path))
375 return self._orig.join(self._filter(path))
364 else:
376 else:
365 return self._orig.join(path)
377 return self._orig.join(path)
366
378
367 filteropener = filtervfs
379 filteropener = filtervfs
368
380
369 def canonpath(root, cwd, myname, auditor=None):
381 def canonpath(root, cwd, myname, auditor=None):
370 '''return the canonical path of myname, given cwd and root'''
382 '''return the canonical path of myname, given cwd and root'''
371 if util.endswithsep(root):
383 if util.endswithsep(root):
372 rootsep = root
384 rootsep = root
373 else:
385 else:
374 rootsep = root + os.sep
386 rootsep = root + os.sep
375 name = myname
387 name = myname
376 if not os.path.isabs(name):
388 if not os.path.isabs(name):
377 name = os.path.join(root, cwd, name)
389 name = os.path.join(root, cwd, name)
378 name = os.path.normpath(name)
390 name = os.path.normpath(name)
379 if auditor is None:
391 if auditor is None:
380 auditor = pathauditor(root)
392 auditor = pathauditor(root)
381 if name != rootsep and name.startswith(rootsep):
393 if name != rootsep and name.startswith(rootsep):
382 name = name[len(rootsep):]
394 name = name[len(rootsep):]
383 auditor(name)
395 auditor(name)
384 return util.pconvert(name)
396 return util.pconvert(name)
385 elif name == root:
397 elif name == root:
386 return ''
398 return ''
387 else:
399 else:
388 # Determine whether `name' is in the hierarchy at or beneath `root',
400 # Determine whether `name' is in the hierarchy at or beneath `root',
389 # by iterating name=dirname(name) until that causes no change (can't
401 # by iterating name=dirname(name) until that causes no change (can't
390 # check name == '/', because that doesn't work on windows). The list
402 # check name == '/', because that doesn't work on windows). The list
391 # `rel' holds the reversed list of components making up the relative
403 # `rel' holds the reversed list of components making up the relative
392 # file name we want.
404 # file name we want.
393 rel = []
405 rel = []
394 while True:
406 while True:
395 try:
407 try:
396 s = util.samefile(name, root)
408 s = util.samefile(name, root)
397 except OSError:
409 except OSError:
398 s = False
410 s = False
399 if s:
411 if s:
400 if not rel:
412 if not rel:
401 # name was actually the same as root (maybe a symlink)
413 # name was actually the same as root (maybe a symlink)
402 return ''
414 return ''
403 rel.reverse()
415 rel.reverse()
404 name = os.path.join(*rel)
416 name = os.path.join(*rel)
405 auditor(name)
417 auditor(name)
406 return util.pconvert(name)
418 return util.pconvert(name)
407 dirname, basename = util.split(name)
419 dirname, basename = util.split(name)
408 rel.append(basename)
420 rel.append(basename)
409 if dirname == name:
421 if dirname == name:
410 break
422 break
411 name = dirname
423 name = dirname
412
424
413 raise util.Abort('%s not under root' % myname)
425 raise util.Abort('%s not under root' % myname)
414
426
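# Illustration only (not part of the changeset): on a POSIX system an
# absolute spelling and a cwd-relative spelling of the same file reduce to
# one repo-relative name. '/repo' is hypothetical and need not exist.
assert canonpath('/repo', 'src', 'module.py') == 'src/module.py'
assert canonpath('/repo', '', '/repo/src/module.py') == 'src/module.py'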
415 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
427 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
416 '''yield every hg repository under path, always recursively.
428 '''yield every hg repository under path, always recursively.
417 The recurse flag will only control recursion into repo working dirs'''
429 The recurse flag will only control recursion into repo working dirs'''
418 def errhandler(err):
430 def errhandler(err):
419 if err.filename == path:
431 if err.filename == path:
420 raise err
432 raise err
421 samestat = getattr(os.path, 'samestat', None)
433 samestat = getattr(os.path, 'samestat', None)
422 if followsym and samestat is not None:
434 if followsym and samestat is not None:
423 def adddir(dirlst, dirname):
435 def adddir(dirlst, dirname):
424 match = False
436 match = False
425 dirstat = os.stat(dirname)
437 dirstat = os.stat(dirname)
426 for lstdirstat in dirlst:
438 for lstdirstat in dirlst:
427 if samestat(dirstat, lstdirstat):
439 if samestat(dirstat, lstdirstat):
428 match = True
440 match = True
429 break
441 break
430 if not match:
442 if not match:
431 dirlst.append(dirstat)
443 dirlst.append(dirstat)
432 return not match
444 return not match
433 else:
445 else:
434 followsym = False
446 followsym = False
435
447
436 if (seen_dirs is None) and followsym:
448 if (seen_dirs is None) and followsym:
437 seen_dirs = []
449 seen_dirs = []
438 adddir(seen_dirs, path)
450 adddir(seen_dirs, path)
439 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
451 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
440 dirs.sort()
452 dirs.sort()
441 if '.hg' in dirs:
453 if '.hg' in dirs:
442 yield root # found a repository
454 yield root # found a repository
443 qroot = os.path.join(root, '.hg', 'patches')
455 qroot = os.path.join(root, '.hg', 'patches')
444 if os.path.isdir(os.path.join(qroot, '.hg')):
456 if os.path.isdir(os.path.join(qroot, '.hg')):
445 yield qroot # we have a patch queue repo here
457 yield qroot # we have a patch queue repo here
446 if recurse:
458 if recurse:
447 # avoid recursing inside the .hg directory
459 # avoid recursing inside the .hg directory
448 dirs.remove('.hg')
460 dirs.remove('.hg')
449 else:
461 else:
450 dirs[:] = [] # don't descend further
462 dirs[:] = [] # don't descend further
451 elif followsym:
463 elif followsym:
452 newdirs = []
464 newdirs = []
453 for d in dirs:
465 for d in dirs:
454 fname = os.path.join(root, d)
466 fname = os.path.join(root, d)
455 if adddir(seen_dirs, fname):
467 if adddir(seen_dirs, fname):
456 if os.path.islink(fname):
468 if os.path.islink(fname):
457 for hgname in walkrepos(fname, True, seen_dirs):
469 for hgname in walkrepos(fname, True, seen_dirs):
458 yield hgname
470 yield hgname
459 else:
471 else:
460 newdirs.append(d)
472 newdirs.append(d)
461 dirs[:] = newdirs
473 dirs[:] = newdirs
462
474
463 def osrcpath():
475 def osrcpath():
464 '''return default os-specific hgrc search path'''
476 '''return default os-specific hgrc search path'''
465 path = systemrcpath()
477 path = systemrcpath()
466 path.extend(userrcpath())
478 path.extend(userrcpath())
467 path = [os.path.normpath(f) for f in path]
479 path = [os.path.normpath(f) for f in path]
468 return path
480 return path
469
481
470 _rcpath = None
482 _rcpath = None
471
483
472 def rcpath():
484 def rcpath():
473 '''return hgrc search path. if env var HGRCPATH is set, use it.
485 '''return hgrc search path. if env var HGRCPATH is set, use it.
474 for each item in path, if directory, use files ending in .rc,
486 for each item in path, if directory, use files ending in .rc,
475 else use item.
487 else use item.
476 make HGRCPATH empty to only look in .hg/hgrc of current repo.
488 make HGRCPATH empty to only look in .hg/hgrc of current repo.
477 if no HGRCPATH, use default os-specific path.'''
489 if no HGRCPATH, use default os-specific path.'''
478 global _rcpath
490 global _rcpath
479 if _rcpath is None:
491 if _rcpath is None:
480 if 'HGRCPATH' in os.environ:
492 if 'HGRCPATH' in os.environ:
481 _rcpath = []
493 _rcpath = []
482 for p in os.environ['HGRCPATH'].split(os.pathsep):
494 for p in os.environ['HGRCPATH'].split(os.pathsep):
483 if not p:
495 if not p:
484 continue
496 continue
485 p = util.expandpath(p)
497 p = util.expandpath(p)
486 if os.path.isdir(p):
498 if os.path.isdir(p):
487 for f, kind in osutil.listdir(p):
499 for f, kind in osutil.listdir(p):
488 if f.endswith('.rc'):
500 if f.endswith('.rc'):
489 _rcpath.append(os.path.join(p, f))
501 _rcpath.append(os.path.join(p, f))
490 else:
502 else:
491 _rcpath.append(p)
503 _rcpath.append(p)
492 else:
504 else:
493 _rcpath = osrcpath()
505 _rcpath = osrcpath()
494 return _rcpath
506 return _rcpath
495
507
496 if os.name != 'nt':
508 if os.name != 'nt':
497
509
498 def rcfiles(path):
510 def rcfiles(path):
499 rcs = [os.path.join(path, 'hgrc')]
511 rcs = [os.path.join(path, 'hgrc')]
500 rcdir = os.path.join(path, 'hgrc.d')
512 rcdir = os.path.join(path, 'hgrc.d')
501 try:
513 try:
502 rcs.extend([os.path.join(rcdir, f)
514 rcs.extend([os.path.join(rcdir, f)
503 for f, kind in osutil.listdir(rcdir)
515 for f, kind in osutil.listdir(rcdir)
504 if f.endswith(".rc")])
516 if f.endswith(".rc")])
505 except OSError:
517 except OSError:
506 pass
518 pass
507 return rcs
519 return rcs
508
520
509 def systemrcpath():
521 def systemrcpath():
510 path = []
522 path = []
511 if sys.platform == 'plan9':
523 if sys.platform == 'plan9':
512 root = 'lib/mercurial'
524 root = 'lib/mercurial'
513 else:
525 else:
514 root = 'etc/mercurial'
526 root = 'etc/mercurial'
515 # old mod_python does not set sys.argv
527 # old mod_python does not set sys.argv
516 if len(getattr(sys, 'argv', [])) > 0:
528 if len(getattr(sys, 'argv', [])) > 0:
517 p = os.path.dirname(os.path.dirname(sys.argv[0]))
529 p = os.path.dirname(os.path.dirname(sys.argv[0]))
518 path.extend(rcfiles(os.path.join(p, root)))
530 path.extend(rcfiles(os.path.join(p, root)))
519 path.extend(rcfiles('/' + root))
531 path.extend(rcfiles('/' + root))
520 return path
532 return path
521
533
522 def userrcpath():
534 def userrcpath():
523 if sys.platform == 'plan9':
535 if sys.platform == 'plan9':
524 return [os.environ['home'] + '/lib/hgrc']
536 return [os.environ['home'] + '/lib/hgrc']
525 else:
537 else:
526 return [os.path.expanduser('~/.hgrc')]
538 return [os.path.expanduser('~/.hgrc')]
527
539
528 else:
540 else:
529
541
530 import _winreg
542 import _winreg
531
543
532 def systemrcpath():
544 def systemrcpath():
533 '''return default os-specific hgrc search path'''
545 '''return default os-specific hgrc search path'''
534 rcpath = []
546 rcpath = []
535 filename = util.executablepath()
547 filename = util.executablepath()
536 # Use mercurial.ini found in directory with hg.exe
548 # Use mercurial.ini found in directory with hg.exe
537 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
549 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
538 if os.path.isfile(progrc):
550 if os.path.isfile(progrc):
539 rcpath.append(progrc)
551 rcpath.append(progrc)
540 return rcpath
552 return rcpath
541 # Use hgrc.d found in directory with hg.exe
553 # Use hgrc.d found in directory with hg.exe
542 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
554 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
543 if os.path.isdir(progrcd):
555 if os.path.isdir(progrcd):
544 for f, kind in osutil.listdir(progrcd):
556 for f, kind in osutil.listdir(progrcd):
545 if f.endswith('.rc'):
557 if f.endswith('.rc'):
546 rcpath.append(os.path.join(progrcd, f))
558 rcpath.append(os.path.join(progrcd, f))
547 return rcpath
559 return rcpath
548 # else look for a system rcpath in the registry
560 # else look for a system rcpath in the registry
549 value = util.lookupreg('SOFTWARE\\Mercurial', None,
561 value = util.lookupreg('SOFTWARE\\Mercurial', None,
550 _winreg.HKEY_LOCAL_MACHINE)
562 _winreg.HKEY_LOCAL_MACHINE)
551 if not isinstance(value, str) or not value:
563 if not isinstance(value, str) or not value:
552 return rcpath
564 return rcpath
553 value = util.localpath(value)
565 value = util.localpath(value)
554 for p in value.split(os.pathsep):
566 for p in value.split(os.pathsep):
555 if p.lower().endswith('mercurial.ini'):
567 if p.lower().endswith('mercurial.ini'):
556 rcpath.append(p)
568 rcpath.append(p)
557 elif os.path.isdir(p):
569 elif os.path.isdir(p):
558 for f, kind in osutil.listdir(p):
570 for f, kind in osutil.listdir(p):
559 if f.endswith('.rc'):
571 if f.endswith('.rc'):
560 rcpath.append(os.path.join(p, f))
572 rcpath.append(os.path.join(p, f))
561 return rcpath
573 return rcpath
562
574
563 def userrcpath():
575 def userrcpath():
564 '''return os-specific hgrc search path to the user dir'''
576 '''return os-specific hgrc search path to the user dir'''
565 home = os.path.expanduser('~')
577 home = os.path.expanduser('~')
566 path = [os.path.join(home, 'mercurial.ini'),
578 path = [os.path.join(home, 'mercurial.ini'),
567 os.path.join(home, '.hgrc')]
579 os.path.join(home, '.hgrc')]
568 userprofile = os.environ.get('USERPROFILE')
580 userprofile = os.environ.get('USERPROFILE')
569 if userprofile:
581 if userprofile:
570 path.append(os.path.join(userprofile, 'mercurial.ini'))
582 path.append(os.path.join(userprofile, 'mercurial.ini'))
571 path.append(os.path.join(userprofile, '.hgrc'))
583 path.append(os.path.join(userprofile, '.hgrc'))
572 return path
584 return path
573
585
574 def revsingle(repo, revspec, default='.'):
586 def revsingle(repo, revspec, default='.'):
575 if not revspec:
587 if not revspec:
576 return repo[default]
588 return repo[default]
577
589
578 l = revrange(repo, [revspec])
590 l = revrange(repo, [revspec])
579 if len(l) < 1:
591 if len(l) < 1:
580 raise util.Abort(_('empty revision set'))
592 raise util.Abort(_('empty revision set'))
581 return repo[l[-1]]
593 return repo[l[-1]]
582
594
583 def revpair(repo, revs):
595 def revpair(repo, revs):
584 if not revs:
596 if not revs:
585 return repo.dirstate.p1(), None
597 return repo.dirstate.p1(), None
586
598
587 l = revrange(repo, revs)
599 l = revrange(repo, revs)
588
600
589 if len(l) == 0:
601 if len(l) == 0:
590 if revs:
602 if revs:
591 raise util.Abort(_('empty revision range'))
603 raise util.Abort(_('empty revision range'))
592 return repo.dirstate.p1(), None
604 return repo.dirstate.p1(), None
593
605
594 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
606 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
595 return repo.lookup(l[0]), None
607 return repo.lookup(l[0]), None
596
608
597 return repo.lookup(l[0]), repo.lookup(l[-1])
609 return repo.lookup(l[0]), repo.lookup(l[-1])
598
610
599 _revrangesep = ':'
611 _revrangesep = ':'
600
612
601 def revrange(repo, revs):
613 def revrange(repo, revs):
602 """Yield revision as strings from a list of revision specifications."""
614 """Yield revision as strings from a list of revision specifications."""
603
615
604 def revfix(repo, val, defval):
616 def revfix(repo, val, defval):
605 if not val and val != 0 and defval is not None:
617 if not val and val != 0 and defval is not None:
606 return defval
618 return defval
607 return repo[val].rev()
619 return repo[val].rev()
608
620
609 seen, l = set(), []
621 seen, l = set(), []
610 for spec in revs:
622 for spec in revs:
611 if l and not seen:
623 if l and not seen:
612 seen = set(l)
624 seen = set(l)
613 # attempt to parse old-style ranges first to deal with
625 # attempt to parse old-style ranges first to deal with
614 # things like old-tag which contain query metacharacters
626 # things like old-tag which contain query metacharacters
615 try:
627 try:
616 if isinstance(spec, int):
628 if isinstance(spec, int):
617 seen.add(spec)
629 seen.add(spec)
618 l.append(spec)
630 l.append(spec)
619 continue
631 continue
620
632
621 if _revrangesep in spec:
633 if _revrangesep in spec:
622 start, end = spec.split(_revrangesep, 1)
634 start, end = spec.split(_revrangesep, 1)
623 start = revfix(repo, start, 0)
635 start = revfix(repo, start, 0)
624 end = revfix(repo, end, len(repo) - 1)
636 end = revfix(repo, end, len(repo) - 1)
625 step = start > end and -1 or 1
637 step = start > end and -1 or 1
626 if not seen and not l:
638 if not seen and not l:
627 # by far the most common case: revs = ["-1:0"]
639 # by far the most common case: revs = ["-1:0"]
628 l = range(start, end + step, step)
640 l = range(start, end + step, step)
629 # defer syncing seen until next iteration
641 # defer syncing seen until next iteration
630 continue
642 continue
631 newrevs = set(xrange(start, end + step, step))
643 newrevs = set(xrange(start, end + step, step))
632 if seen:
644 if seen:
633 newrevs.difference_update(seen)
645 newrevs.difference_update(seen)
634 seen.update(newrevs)
646 seen.update(newrevs)
635 else:
647 else:
636 seen = newrevs
648 seen = newrevs
637 l.extend(sorted(newrevs, reverse=start > end))
649 l.extend(sorted(newrevs, reverse=start > end))
638 continue
650 continue
639 elif spec and spec in repo: # single unquoted rev
651 elif spec and spec in repo: # single unquoted rev
640 rev = revfix(repo, spec, None)
652 rev = revfix(repo, spec, None)
641 if rev in seen:
653 if rev in seen:
642 continue
654 continue
643 seen.add(rev)
655 seen.add(rev)
644 l.append(rev)
656 l.append(rev)
645 continue
657 continue
646 except error.RepoLookupError:
658 except error.RepoLookupError:
647 pass
659 pass
648
660
649 # fall through to new-style queries if old-style fails
661 # fall through to new-style queries if old-style fails
650 m = revset.match(repo.ui, spec)
662 m = revset.match(repo.ui, spec)
651 dl = [r for r in m(repo, list(repo)) if r not in seen]
663 dl = [r for r in m(repo, list(repo)) if r not in seen]
652 l.extend(dl)
664 l.extend(dl)
653 seen.update(dl)
665 seen.update(dl)
654
666
655 return l
667 return l
656
668
657 def expandpats(pats):
669 def expandpats(pats):
658 if not util.expandglobs:
670 if not util.expandglobs:
659 return list(pats)
671 return list(pats)
660 ret = []
672 ret = []
661 for p in pats:
673 for p in pats:
662 kind, name = matchmod._patsplit(p, None)
674 kind, name = matchmod._patsplit(p, None)
663 if kind is None:
675 if kind is None:
664 try:
676 try:
665 globbed = glob.glob(name)
677 globbed = glob.glob(name)
666 except re.error:
678 except re.error:
667 globbed = [name]
679 globbed = [name]
668 if globbed:
680 if globbed:
669 ret.extend(globbed)
681 ret.extend(globbed)
670 continue
682 continue
671 ret.append(p)
683 ret.append(p)
672 return ret
684 return ret
673
685
674 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
686 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
675 if pats == ("",):
687 if pats == ("",):
676 pats = []
688 pats = []
677 if not globbed and default == 'relpath':
689 if not globbed and default == 'relpath':
678 pats = expandpats(pats or [])
690 pats = expandpats(pats or [])
679
691
680 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
692 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
681 default)
693 default)
682 def badfn(f, msg):
694 def badfn(f, msg):
683 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
695 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
684 m.bad = badfn
696 m.bad = badfn
685 return m, pats
697 return m, pats
686
698
687 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
699 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
688 return matchandpats(ctx, pats, opts, globbed, default)[0]
700 return matchandpats(ctx, pats, opts, globbed, default)[0]
689
701
690 def matchall(repo):
702 def matchall(repo):
691 return matchmod.always(repo.root, repo.getcwd())
703 return matchmod.always(repo.root, repo.getcwd())
692
704
693 def matchfiles(repo, files):
705 def matchfiles(repo, files):
694 return matchmod.exact(repo.root, repo.getcwd(), files)
706 return matchmod.exact(repo.root, repo.getcwd(), files)
695
707
696 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
708 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
697 if dry_run is None:
709 if dry_run is None:
698 dry_run = opts.get('dry_run')
710 dry_run = opts.get('dry_run')
699 if similarity is None:
711 if similarity is None:
700 similarity = float(opts.get('similarity') or 0)
712 similarity = float(opts.get('similarity') or 0)
701 # we'd use status here, except handling of symlinks and ignore is tricky
713 # we'd use status here, except handling of symlinks and ignore is tricky
702 added, unknown, deleted, removed = [], [], [], []
714 added, unknown, deleted, removed = [], [], [], []
703 audit_path = pathauditor(repo.root)
715 audit_path = pathauditor(repo.root)
704 m = match(repo[None], pats, opts)
716 m = match(repo[None], pats, opts)
705 rejected = []
717 rejected = []
706 m.bad = lambda x, y: rejected.append(x)
718 m.bad = lambda x, y: rejected.append(x)
707
719
708 for abs in repo.walk(m):
720 for abs in repo.walk(m):
709 target = repo.wjoin(abs)
721 target = repo.wjoin(abs)
710 good = True
722 good = True
711 try:
723 try:
712 audit_path(abs)
724 audit_path(abs)
713 except (OSError, util.Abort):
725 except (OSError, util.Abort):
714 good = False
726 good = False
715 rel = m.rel(abs)
727 rel = m.rel(abs)
716 exact = m.exact(abs)
728 exact = m.exact(abs)
717 if good and abs not in repo.dirstate:
729 if good and abs not in repo.dirstate:
718 unknown.append(abs)
730 unknown.append(abs)
719 if repo.ui.verbose or not exact:
731 if repo.ui.verbose or not exact:
720 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
732 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
721 elif (repo.dirstate[abs] != 'r' and
733 elif (repo.dirstate[abs] != 'r' and
722 (not good or not os.path.lexists(target) or
734 (not good or not os.path.lexists(target) or
723 (os.path.isdir(target) and not os.path.islink(target)))):
735 (os.path.isdir(target) and not os.path.islink(target)))):
724 deleted.append(abs)
736 deleted.append(abs)
725 if repo.ui.verbose or not exact:
737 if repo.ui.verbose or not exact:
726 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
738 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
727 # for finding renames
739 # for finding renames
728 elif repo.dirstate[abs] == 'r':
740 elif repo.dirstate[abs] == 'r':
729 removed.append(abs)
741 removed.append(abs)
730 elif repo.dirstate[abs] == 'a':
742 elif repo.dirstate[abs] == 'a':
731 added.append(abs)
743 added.append(abs)
732 copies = {}
744 copies = {}
733 if similarity > 0:
745 if similarity > 0:
734 for old, new, score in similar.findrenames(repo,
746 for old, new, score in similar.findrenames(repo,
735 added + unknown, removed + deleted, similarity):
747 added + unknown, removed + deleted, similarity):
736 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
748 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
737 repo.ui.status(_('recording removal of %s as rename to %s '
749 repo.ui.status(_('recording removal of %s as rename to %s '
738 '(%d%% similar)\n') %
750 '(%d%% similar)\n') %
739 (m.rel(old), m.rel(new), score * 100))
751 (m.rel(old), m.rel(new), score * 100))
740 copies[new] = old
752 copies[new] = old
741
753
742 if not dry_run:
754 if not dry_run:
743 wctx = repo[None]
755 wctx = repo[None]
744 wlock = repo.wlock()
756 wlock = repo.wlock()
745 try:
757 try:
746 wctx.forget(deleted)
758 wctx.forget(deleted)
747 wctx.add(unknown)
759 wctx.add(unknown)
748 for new, old in copies.iteritems():
760 for new, old in copies.iteritems():
749 wctx.copy(old, new)
761 wctx.copy(old, new)
750 finally:
762 finally:
751 wlock.release()
763 wlock.release()
752
764
753 for f in rejected:
765 for f in rejected:
754 if f in m.files():
766 if f in m.files():
755 return 1
767 return 1
756 return 0
768 return 0
757
769
758 def updatedir(ui, repo, patches, similarity=0):
770 def updatedir(ui, repo, patches, similarity=0):
759 '''Update dirstate after patch application according to metadata'''
771 '''Update dirstate after patch application according to metadata'''
760 if not patches:
772 if not patches:
761 return []
773 return []
762 copies = []
774 copies = []
763 removes = set()
775 removes = set()
764 cfiles = patches.keys()
776 cfiles = patches.keys()
765 cwd = repo.getcwd()
777 cwd = repo.getcwd()
766 if cwd:
778 if cwd:
767 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
779 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
768 for f in patches:
780 for f in patches:
769 gp = patches[f]
781 gp = patches[f]
770 if not gp:
782 if not gp:
771 continue
783 continue
772 if gp.op == 'RENAME':
784 if gp.op == 'RENAME':
773 copies.append((gp.oldpath, gp.path))
785 copies.append((gp.oldpath, gp.path))
774 removes.add(gp.oldpath)
786 removes.add(gp.oldpath)
775 elif gp.op == 'COPY':
787 elif gp.op == 'COPY':
776 copies.append((gp.oldpath, gp.path))
788 copies.append((gp.oldpath, gp.path))
777 elif gp.op == 'DELETE':
789 elif gp.op == 'DELETE':
778 removes.add(gp.path)
790 removes.add(gp.path)
779
791
780 wctx = repo[None]
792 wctx = repo[None]
781 for src, dst in copies:
793 for src, dst in copies:
782 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
794 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
783 if (not similarity) and removes:
795 if (not similarity) and removes:
784 wctx.remove(sorted(removes), True)
796 wctx.remove(sorted(removes), True)
785
797
786 for f in patches:
798 for f in patches:
787 gp = patches[f]
799 gp = patches[f]
788 if gp and gp.mode:
800 if gp and gp.mode:
789 islink, isexec = gp.mode
801 islink, isexec = gp.mode
790 dst = repo.wjoin(gp.path)
802 dst = repo.wjoin(gp.path)
791 # patch won't create empty files
803 # patch won't create empty files
792 if gp.op == 'ADD' and not os.path.lexists(dst):
804 if gp.op == 'ADD' and not os.path.lexists(dst):
793 flags = (isexec and 'x' or '') + (islink and 'l' or '')
805 flags = (isexec and 'x' or '') + (islink and 'l' or '')
794 repo.wwrite(gp.path, '', flags)
806 repo.wwrite(gp.path, '', flags)
795 util.setflags(dst, islink, isexec)
807 util.setflags(dst, islink, isexec)
796 addremove(repo, cfiles, similarity=similarity)
808 addremove(repo, cfiles, similarity=similarity)
797 files = patches.keys()
809 files = patches.keys()
798 files.extend([r for r in removes if r not in files])
810 files.extend([r for r in removes if r not in files])
799 return sorted(files)
811 return sorted(files)
800
812
801 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
813 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
802 """Update the dirstate to reflect the intent of copying src to dst. For
814 """Update the dirstate to reflect the intent of copying src to dst. For
803 different reasons it might not end with dst being marked as copied from src.
815 different reasons it might not end with dst being marked as copied from src.
804 """
816 """
805 origsrc = repo.dirstate.copied(src) or src
817 origsrc = repo.dirstate.copied(src) or src
806 if dst == origsrc: # copying back a copy?
818 if dst == origsrc: # copying back a copy?
807 if repo.dirstate[dst] not in 'mn' and not dryrun:
819 if repo.dirstate[dst] not in 'mn' and not dryrun:
808 repo.dirstate.normallookup(dst)
820 repo.dirstate.normallookup(dst)
809 else:
821 else:
810 if repo.dirstate[origsrc] == 'a' and origsrc == src:
822 if repo.dirstate[origsrc] == 'a' and origsrc == src:
811 if not ui.quiet:
823 if not ui.quiet:
812 ui.warn(_("%s has not been committed yet, so no copy "
824 ui.warn(_("%s has not been committed yet, so no copy "
813 "data will be stored for %s.\n")
825 "data will be stored for %s.\n")
814 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
826 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
815 if repo.dirstate[dst] in '?r' and not dryrun:
827 if repo.dirstate[dst] in '?r' and not dryrun:
816 wctx.add([dst])
828 wctx.add([dst])
817 elif not dryrun:
829 elif not dryrun:
818 wctx.copy(origsrc, dst)
830 wctx.copy(origsrc, dst)
819
831
820 def readrequires(opener, supported):
832 def readrequires(opener, supported):
821 '''Reads and parses .hg/requires and checks if all entries found
833 '''Reads and parses .hg/requires and checks if all entries found
822 are in the list of supported features.'''
834 are in the list of supported features.'''
823 requirements = set(opener.read("requires").splitlines())
835 requirements = set(opener.read("requires").splitlines())
824 missings = []
836 missings = []
825 for r in requirements:
837 for r in requirements:
826 if r not in supported:
838 if r not in supported:
827 if not r or not r[0].isalnum():
839 if not r or not r[0].isalnum():
828 raise error.RequirementError(_(".hg/requires file is corrupt"))
840 raise error.RequirementError(_(".hg/requires file is corrupt"))
829 missings.append(r)
841 missings.append(r)
830 missings.sort()
842 missings.sort()
831 if missings:
843 if missings:
832 raise error.RequirementError(
844 raise error.RequirementError(
833 _("unknown repository format: requires features '%s' (upgrade "
845 _("unknown repository format: requires features '%s' (upgrade "
834 "Mercurial)") % "', '".join(missings))
846 "Mercurial)") % "', '".join(missings))
835 return requirements
847 return requirements
836
848
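# Illustration only (not part of the changeset): readrequires needs nothing
# more than an object with a read() method; fakeopener is hypothetical.
class fakeopener(object):
    def read(self, name):
        # pretend .hg/requires lists a feature this client does not support
        return 'revlogv1\nstore\nfancyfuture\n'

try:
    readrequires(fakeopener(), set(['revlogv1', 'store']))
except error.RequirementError:
    pass   # raised because 'fancyfuture' is not a supported feature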
837 class filecacheentry(object):
849 class filecacheentry(object):
838 def __init__(self, path):
850 def __init__(self, path):
839 self.path = path
851 self.path = path
840 self.cachestat = filecacheentry.stat(self.path)
852 self.cachestat = filecacheentry.stat(self.path)
841
853
842 if self.cachestat:
854 if self.cachestat:
843 self._cacheable = self.cachestat.cacheable()
855 self._cacheable = self.cachestat.cacheable()
844 else:
856 else:
845 # None means we don't know yet
857 # None means we don't know yet
846 self._cacheable = None
858 self._cacheable = None
847
859
848 def refresh(self):
860 def refresh(self):
849 if self.cacheable():
861 if self.cacheable():
850 self.cachestat = filecacheentry.stat(self.path)
862 self.cachestat = filecacheentry.stat(self.path)
851
863
852 def cacheable(self):
864 def cacheable(self):
853 if self._cacheable is not None:
865 if self._cacheable is not None:
854 return self._cacheable
866 return self._cacheable
855
867
856 # we don't know yet, assume it is for now
868 # we don't know yet, assume it is for now
857 return True
869 return True
858
870
859 def changed(self):
871 def changed(self):
860 # no point in going further if we can't cache it
872 # no point in going further if we can't cache it
861 if not self.cacheable():
873 if not self.cacheable():
862 return True
874 return True
863
875
864 newstat = filecacheentry.stat(self.path)
876 newstat = filecacheentry.stat(self.path)
865
877
866 # we may not know if it's cacheable yet, check again now
878 # we may not know if it's cacheable yet, check again now
867 if newstat and self._cacheable is None:
879 if newstat and self._cacheable is None:
868 self._cacheable = newstat.cacheable()
880 self._cacheable = newstat.cacheable()
869
881
870 # check again
882 # check again
871 if not self._cacheable:
883 if not self._cacheable:
872 return True
884 return True
873
885
874 if self.cachestat != newstat:
886 if self.cachestat != newstat:
875 self.cachestat = newstat
887 self.cachestat = newstat
876 return True
888 return True
877 else:
889 else:
878 return False
890 return False
879
891
880 @staticmethod
892 @staticmethod
881 def stat(path):
893 def stat(path):
882 try:
894 try:
883 return util.cachestat(path)
895 return util.cachestat(path)
884 except OSError, e:
896 except OSError, e:
885 if e.errno != errno.ENOENT:
897 if e.errno != errno.ENOENT:
886 raise
898 raise
887
899
888 class filecache(object):
900 class filecache(object):
889 '''A property like decorator that tracks a file under .hg/ for updates.
901 '''A property like decorator that tracks a file under .hg/ for updates.
890
902
891 Records stat info when called in _filecache.
903 Records stat info when called in _filecache.
892
904
893 On subsequent calls, compares old stat info with new info, and recreates
905 On subsequent calls, compares old stat info with new info, and recreates
894 the object when needed, updating the new stat info in _filecache.
906 the object when needed, updating the new stat info in _filecache.
895
907
896 Mercurial either atomic renames or appends for files under .hg,
908 Mercurial either atomic renames or appends for files under .hg,
897 so to ensure the cache is reliable we need the filesystem to be able
909 so to ensure the cache is reliable we need the filesystem to be able
898 to tell us if a file has been replaced. If it can't, we fallback to
910 to tell us if a file has been replaced. If it can't, we fallback to
899 recreating the object on every call (essentially the same behaviour as
911 recreating the object on every call (essentially the same behaviour as
900 propertycache).'''
912 propertycache).'''
901 def __init__(self, path):
913 def __init__(self, path):
902 self.path = path
914 self.path = path
903
915
904 def join(self, obj, fname):
916 def join(self, obj, fname):
905 """Used to compute the runtime path of the cached file.
917 """Used to compute the runtime path of the cached file.
906
918
907 Users should subclass filecache and provide their own version of this
919 Users should subclass filecache and provide their own version of this
908 function to call the appropriate join function on 'obj' (an instance
920 function to call the appropriate join function on 'obj' (an instance
909 of the class that its member function was decorated).
921 of the class that its member function was decorated).
910 """
922 """
911 return obj.join(fname)
923 return obj.join(fname)
912
924
913 def __call__(self, func):
925 def __call__(self, func):
914 self.func = func
926 self.func = func
915 self.name = func.__name__
927 self.name = func.__name__
916 return self
928 return self
917
929
918 def __get__(self, obj, type=None):
930 def __get__(self, obj, type=None):
919 # do we need to check if the file changed?
931 # do we need to check if the file changed?
920 if self.name in obj.__dict__:
932 if self.name in obj.__dict__:
921 return obj.__dict__[self.name]
933 return obj.__dict__[self.name]
922
934
923 entry = obj._filecache.get(self.name)
935 entry = obj._filecache.get(self.name)
924
936
925 if entry:
937 if entry:
926 if entry.changed():
938 if entry.changed():
927 entry.obj = self.func(obj)
939 entry.obj = self.func(obj)
928 else:
940 else:
929 path = self.join(obj, self.path)
941 path = self.join(obj, self.path)
930
942
931 # We stat -before- creating the object so our cache doesn't lie if
943 # We stat -before- creating the object so our cache doesn't lie if
932 # a writer modified between the time we read and stat
944 # a writer modified between the time we read and stat
933 entry = filecacheentry(path)
945 entry = filecacheentry(path)
934 entry.obj = self.func(obj)
946 entry.obj = self.func(obj)
935
947
936 obj._filecache[self.name] = entry
948 obj._filecache[self.name] = entry
937
949
938 obj.__dict__[self.name] = entry.obj
950 obj.__dict__[self.name] = entry.obj
939 return entry.obj
951 return entry.obj
940
952
941 def __set__(self, obj, value):
953 def __set__(self, obj, value):
942 if self.name in obj._filecache:
954 if self.name in obj._filecache:
943 obj._filecache[self.name].obj = value # update cached copy
955 obj._filecache[self.name].obj = value # update cached copy
944 obj.__dict__[self.name] = value # update copy returned by obj.x
956 obj.__dict__[self.name] = value # update copy returned by obj.x
945
957
946 def __delete__(self, obj):
958 def __delete__(self, obj):
947 try:
959 try:
948 del obj.__dict__[self.name]
960 del obj.__dict__[self.name]
949 except KeyError:
961 except KeyError:
950 raise AttributeError, self.name
962 raise AttributeError, self.name
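# Illustration only (not part of the changeset): how the filecache decorator
# above is typically wired up. fakerepo and the 'bookmarks' file name are
# hypothetical; the decorated value is recomputed only when the backing
# file's stat information changes.
class fakerepo(object):
    def __init__(self):
        self._filecache = {}    # filecache keeps its stat entries here

    def join(self, fname):
        return os.path.join('/tmp/repo/.hg', fname)

    @filecache('bookmarks')
    def bookmarks(self):
        # re-read only when /tmp/repo/.hg/bookmarks is replaced or appended to
        return self.join('bookmarks')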
@@ -1,546 +1,538 @@
# store.py - repository store handling for Mercurial
#
# Copyright 2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
import scmutil, util, parsers
import os, stat, errno

_sha = util.sha1

# This avoids a collision between a file named foo and a dir named
# foo.i or foo.d
def _encodedir(path):
    '''
    >>> _encodedir('data/foo.i')
    'data/foo.i'
    >>> _encodedir('data/foo.i/bla.i')
    'data/foo.i.hg/bla.i'
    >>> _encodedir('data/foo.i.hg/bla.i')
    'data/foo.i.hg.hg/bla.i'
    >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
    'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
    '''
    return (path
            .replace(".hg/", ".hg.hg/")
            .replace(".i/", ".i.hg/")
            .replace(".d/", ".d.hg/"))

encodedir = getattr(parsers, 'encodedir', _encodedir)

def decodedir(path):
    '''
    >>> decodedir('data/foo.i')
    'data/foo.i'
    >>> decodedir('data/foo.i.hg/bla.i')
    'data/foo.i/bla.i'
    >>> decodedir('data/foo.i.hg.hg/bla.i')
    'data/foo.i.hg/bla.i'
    '''
    if ".hg/" not in path:
        return path
    return (path
            .replace(".d.hg/", ".d/")
            .replace(".i.hg/", ".i/")
            .replace(".hg.hg/", ".hg/"))

def _buildencodefun():
    '''
    >>> enc, dec = _buildencodefun()

    >>> enc('nothing/special.txt')
    'nothing/special.txt'
    >>> dec('nothing/special.txt')
    'nothing/special.txt'

    >>> enc('HELLO')
    '_h_e_l_l_o'
    >>> dec('_h_e_l_l_o')
    'HELLO'

    >>> enc('hello:world?')
    'hello~3aworld~3f'
    >>> dec('hello~3aworld~3f')
    'hello:world?'

    >>> enc('the\x07quick\xADshot')
    'the~07quick~adshot'
    >>> dec('the~07quick~adshot')
    'the\\x07quick\\xadshot'
    '''
    e = '_'
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i + l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: ''.join([cmap[c] for c in s]),
            lambda s: ''.join(list(decode(s))))

_encodefname, _decodefname = _buildencodefun()

def encodefilename(s):
    '''
    >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
    'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
    '''
    return _encodefname(encodedir(s))

def decodefilename(s):
    '''
    >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
    'foo.i/bar.d/bla.hg/hi:world?/HELLO'
    '''
    return decodedir(_decodefname(s))

def _buildlowerencodefun():
    '''
    >>> f = _buildlowerencodefun()
    >>> f('nothing/special.txt')
    'nothing/special.txt'
    >>> f('HELLO')
    'hello'
    >>> f('hello:world?')
    'hello~3aworld~3f'
    >>> f('the\x07quick\xADshot')
    'the~07quick~adshot'
    '''
    winreserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
    for x in (range(32) + range(126, 256) + winreserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1):
        cmap[chr(x)] = chr(x).lower()
    return lambda s: "".join([cmap[c] for c in s])

lowerencode = _buildlowerencodefun()

# Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
_winres3 = ('aux', 'con', 'prn', 'nul') # length 3
_winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
def _auxencode(path, dotencode):
    '''
    Encodes filenames containing names reserved by Windows or which end in
    period or space. Does not touch other single reserved characters c.
    Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
    Additionally encodes space or period at the beginning, if dotencode is
    True. Parameter path is assumed to be all lowercase.
    A segment only needs encoding if a reserved name appears as a
    basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
    doesn't need encoding.

    >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
    >>> _auxencode(s.split('/'), True)
    ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
    >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
    >>> _auxencode(s.split('/'), False)
    ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
    >>> _auxencode(['foo. '], True)
    ['foo.~20']
    >>> _auxencode([' .foo'], True)
    ['~20.foo']
    '''
    for i, n in enumerate(path):
        if not n:
            continue
        if dotencode and n[0] in '. ':
            n = "~%02x" % ord(n[0]) + n[1:]
            path[i] = n
        else:
            l = n.find('.')
            if l == -1:
                l = len(n)
            if ((l == 3 and n[:3] in _winres3) or
                (l == 4 and n[3] <= '9' and n[3] >= '1'
                 and n[:3] in _winres4)):
                # encode third letter ('aux' -> 'au~78')
                ec = "~%02x" % ord(n[2])
                n = n[0:2] + ec + n[3:]
                path[i] = n
        if n[-1] in '. ':
            # encode last period or space ('foo...' -> 'foo..~2e')
            path[i] = n[:-1] + "~%02x" % ord(n[-1])
    return path

_maxstorepathlen = 120
_dirprefixlen = 8
_maxshortdirslen = 8 * (_dirprefixlen + 1) - 4

def _hashencode(path, dotencode):
    digest = _sha(path).hexdigest()
    le = lowerencode(path).split('/')[1:]
    parts = _auxencode(le, dotencode)
    basename = parts[-1]
    _root, ext = os.path.splitext(basename)
    sdirs = []
    sdirslen = 0
    for p in parts[:-1]:
        d = p[:_dirprefixlen]
        if d[-1] in '. ':
            # Windows can't access dirs ending in period or space
            d = d[:-1] + '_'
        if sdirslen == 0:
            t = len(d)
        else:
            t = sdirslen + 1 + len(d)
            if t > _maxshortdirslen:
                break
        sdirs.append(d)
        sdirslen = t
    dirs = '/'.join(sdirs)
    if len(dirs) > 0:
        dirs += '/'
    res = 'dh/' + dirs + digest + ext
    spaceleft = _maxstorepathlen - len(res)
    if spaceleft > 0:
        filler = basename[:spaceleft]
        res = 'dh/' + dirs + filler + digest + ext
    return res

def _hybridencode(path, dotencode):
    '''encodes path with a length limit

    Encodes all paths that begin with 'data/', according to the following.

    Default encoding (reversible):

    Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
    characters are encoded as '~xx', where xx is the two digit hex code
    of the character (see encodefilename).
    Relevant path components consisting of Windows reserved filenames are
    masked by encoding the third character ('aux' -> 'au~78', see _auxencode).

    Hashed encoding (not reversible):

    If the default-encoded path is longer than _maxstorepathlen, a
    non-reversible hybrid hashing of the path is done instead.
    This encoding uses up to _dirprefixlen characters of all directory
    levels of the lowerencoded path, but not more levels than can fit into
    _maxshortdirslen.
    Then follows the filler followed by the sha digest of the full path.
    The filler is the beginning of the basename of the lowerencoded path
    (the basename is everything after the last path separator). The filler
    is as long as possible, filling in characters from the basename until
    the encoded path has _maxstorepathlen characters (or all chars of the
    basename have been taken).
    The extension (e.g. '.i' or '.d') is preserved.

    The string 'data/' at the beginning is replaced with 'dh/', if the hashed
    encoding was used.
    '''
    path = encodedir(path)
    ef = _encodefname(path).split('/')
    res = '/'.join(_auxencode(ef, dotencode))
    if len(res) > _maxstorepathlen:
        res = _hashencode(path, dotencode)
    return res
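
A rough illustration of the two regimes described in the docstring above
(a sketch: the hashed form depends on the SHA-1 digest, so only its shape
is checked here):

# short paths keep the reversible default encoding
enc = _hybridencode('data/FOO:bar.i', False)
assert enc == 'data/_f_o_o~3abar.i'

# paths whose default encoding exceeds _maxstorepathlen (120) switch to
# the non-reversible hashed form under 'dh/'
enc = _hybridencode('data/' + 'x' * 200 + '.i', False)
assert enc.startswith('dh/') and len(enc) <= _maxstorepathlen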

def _pathencode(path):
    if len(path) > _maxstorepathlen:
        return None
    ef = _encodefname(encodedir(path)).split('/')
    res = '/'.join(_auxencode(ef, True))
    if len(res) > _maxstorepathlen:
        return None
    return res

_pathencode = getattr(parsers, 'pathencode', _pathencode)

def _dothybridencode(f):
    ef = _pathencode(f)
    if ef is None:
        return _hashencode(encodedir(f), True)
    return ef

def _plainhybridencode(f):
    return _hybridencode(f, False)

def _calcmode(vfs):
    try:
        # files in .hg/ will be created using this mode
        mode = vfs.stat().st_mode
        # avoid some useless chmods
        if (0777 & ~util.umask) == (0777 & mode):
            mode = None
    except OSError:
        mode = None
    return mode

_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
         ' phaseroots obsstore')

class basicstore(object):
    '''base class for local repository stores'''
    def __init__(self, path, vfstype):
        vfs = vfstype(path)
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodedir)
        self.opener = self.vfs

    def join(self, f):
        return self.path + '/' + encodedir(f)

    def _walk(self, relpath, recurse):
        '''yields (unencoded, encoded, size)'''
        path = self.path
        if relpath:
            path += '/' + relpath
        striplen = len(self.path) + 1
        l = []
        if self.rawvfs.isdir(path):
            visit = [path]
            readdir = self.rawvfs.readdir
            while visit:
                p = visit.pop()
                for f, kind, st in readdir(p, stat=True):
                    fp = p + '/' + f
                    if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
                        n = util.pconvert(fp[striplen:])
                        l.append((decodedir(n), n, st.st_size))
                    elif kind == stat.S_IFDIR and recurse:
                        visit.append(fp)
        l.sort()
        return l

    def datafiles(self):
        return self._walk('data', True)

    def walk(self):
        '''yields (unencoded, encoded, size)'''
        # yield data files first
        for x in self.datafiles():
            yield x
        # yield manifest before changelog
        for x in reversed(self._walk('', False)):
            yield x

    def copylist(self):
        return ['requires'] + _data.split()

    def write(self):
        pass

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # file?
        if os.path.exists(self.join(path + ".i")):
            return True
        # dir?
        if not path.endswith("/"):
            path = path + "/"
        return os.path.exists(self.join(path))

class encodedstore(basicstore):
    def __init__(self, path, vfstype):
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        self.vfs = scmutil.filtervfs(vfs, encodefilename)
        self.opener = self.vfs

    def datafiles(self):
        for a, b, size in self._walk('data', True):
            try:
                a = decodefilename(a)
            except KeyError:
                a = None
            yield a, b, size

    def join(self, f):
        return self.path + '/' + encodefilename(f)

    def copylist(self):
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in _data.split()])

class fncache(object):
    # the filename used to be partially encoded
    # hence the encodedir/decodedir dance
    def __init__(self, vfs):
        self.vfs = vfs
        self.entries = None
        self._dirty = False

    def _load(self):
        '''fill the entries from the fncache file'''
        self._dirty = False
        try:
            fp = self.vfs('fncache', mode='rb')
        except IOError:
            # skip nonexistent file
            self.entries = set()
            return
        self.entries = set(decodedir(fp.read()).splitlines())
        if '' in self.entries:
            fp.seek(0)
            for n, line in enumerate(fp):
                if not line.rstrip('\n'):
                    t = _('invalid entry in fncache, line %s') % (n + 1)
                    raise util.Abort(t)
        fp.close()

    def _write(self, files, atomictemp):
        fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
        if files:
            fp.write(encodedir('\n'.join(files) + '\n'))
        fp.close()
        self._dirty = False

    def rewrite(self, files):
        self._write(files, False)
        self.entries = set(files)

    def write(self):
        if self._dirty:
            self._write(self.entries, True)

    def add(self, fn):
        if self.entries is None:
            self._load()
        if fn not in self.entries:
            self._dirty = True
            self.entries.add(fn)

    def __contains__(self, fn):
        if self.entries is None:
            self._load()
        return fn in self.entries

    def __iter__(self):
        if self.entries is None:
            self._load()
        return iter(self.entries)

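A small sketch of how the fncache class above is typically driven
(illustrative; 'storevfs' stands for whichever vfs the store was built with):

fnc = fncache(storevfs)
fnc.add('data/foo.i')          # loads .hg/store/fncache lazily, marks it dirty
fnc.add('data/bar.d/baz.i')    # stored on disk as 'data/bar.d.hg/baz.i' (encodedir)
assert 'data/foo.i' in fnc
fnc.write()                    # atomictemp rewrite, only because entries changed
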
-class _fncachevfs(scmutil.abstractvfs):
+class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs):
     def __init__(self, vfs, fnc, encode):
-        self.vfs = vfs
+        scmutil.auditvfs.__init__(self, vfs)
         self.fncache = fnc
         self.encode = encode
 
-    def _getmustaudit(self):
-        return self.vfs.mustaudit
-
-    def _setmustaudit(self, onoff):
-        self.vfs.mustaudit = onoff
-
-    mustaudit = property(_getmustaudit, _setmustaudit)
-
     def __call__(self, path, mode='r', *args, **kw):
         if mode not in ('r', 'rb') and path.startswith('data/'):
             self.fncache.add(path)
         return self.vfs(self.encode(path), mode, *args, **kw)
 
     def join(self, path):
         if path:
             return self.vfs.join(self.encode(path))
         else:
             return self.vfs.join(path)
 
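For context on the hunk above: the mustaudit property deleted from
_fncachevfs presumably moves into scmutil.auditvfs, roughly along these
lines (a sketch inferred from the removed lines; the actual definition in
scmutil.py may differ):

class auditvfs(object):
    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
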
class fncachestore(basicstore):
    def __init__(self, path, vfstype, dotencode):
        if dotencode:
            encode = _dothybridencode
        else:
            encode = _plainhybridencode
        self.encode = encode
        vfs = vfstype(path + '/store')
        self.path = vfs.base
        self.pathsep = self.path + '/'
        self.createmode = _calcmode(vfs)
        vfs.createmode = self.createmode
        self.rawvfs = vfs
        fnc = fncache(vfs)
        self.fncache = fnc
        self.vfs = _fncachevfs(vfs, fnc, encode)
        self.opener = self.vfs

    def join(self, f):
        return self.pathsep + self.encode(f)

    def getsize(self, path):
        return self.rawvfs.stat(path).st_size

    def datafiles(self):
        rewrite = False
        existing = []
        for f in sorted(self.fncache):
            ef = self.encode(f)
            try:
                yield f, ef, self.getsize(ef)
                existing.append(f)
            except OSError, err:
                if err.errno != errno.ENOENT:
                    raise
                # nonexistent entry
                rewrite = True
        if rewrite:
            # rewrite fncache to remove nonexistent entries
            # (may be caused by rollback / strip)
            self.fncache.rewrite(existing)

    def copylist(self):
        d = ('data dh fncache phaseroots obsstore'
             ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
        return (['requires', '00changelog.i'] +
                ['store/' + f for f in d.split()])

    def write(self):
        self.fncache.write()

    def _exists(self, f):
        ef = self.encode(f)
        try:
            self.getsize(ef)
            return True
        except OSError, err:
            if err.errno != errno.ENOENT:
                raise
            # nonexistent entry
            return False

    def __contains__(self, path):
        '''Checks if the store contains path'''
        path = "/".join(("data", path))
        # check for files (exact match)
        e = path + '.i'
        if e in self.fncache and self._exists(e):
            return True
        # now check for directories (prefix match)
        if not path.endswith('/'):
            path += '/'
        for e in self.fncache:
            if e.startswith(path) and self._exists(e):
                return True
        return False

def store(requirements, path, vfstype):
    if 'store' in requirements:
        if 'fncache' in requirements:
            return fncachestore(path, vfstype, 'dotencode' in requirements)
        return encodedstore(path, vfstype)
    return basicstore(path, vfstype)
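
A hedged usage sketch of the store() factory (names are illustrative:
scmutil.vfs is assumed to be the usual opener/vfs class, and the path is a
repository's .hg directory):

requirements = set(['revlogv1', 'store', 'fncache', 'dotencode'])
s = store(requirements, '/path/to/repo/.hg', scmutil.vfs)
print s.__class__.__name__           # fncachestore, given the requirements above
for unencoded, encoded, size in s.walk():
    print '%10d  %s' % (size, unencoded)
s.write()                            # flushes a dirty fncache, if any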