vfs: define "join()" in each class derived from "abstractvfs"...
FUJIWARA Katsunori
r17725:ffd589d4 default
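The helper methods on abstractvfs shown in the scmutil.py hunk below (exists, isdir, makedir, makedirs, mkdir) are all written in terms of self.join(), so every concrete vfs class has to supply join() itself: vfs already resolves names against its base directory, and this changeset adds the missing join() to filtervfs in scmutil.py (apply the filename filter, then delegate to the wrapped vfs) and to statichttpvfs in statichttprepo.py. The sketch below distills that relationship; class names mirror the diff, everything unrelated is trimmed away, and it is an illustration rather than the full implementations.

# Distilled sketch: the abstractvfs helpers depend on a join() provided by
# each concrete class.  Trimmed-down illustration, not the complete classes.
import os

class abstractvfs(object):
    def exists(self, path=None):
        return os.path.exists(self.join(path))      # relies on subclass join()

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

class vfs(abstractvfs):
    def __init__(self, base):
        self.base = base

    def join(self, path):
        # resolve relative names against the base directory
        if path:
            return os.path.join(self.base, path)
        return self.base

class filtervfs(abstractvfs):
    def __init__(self, opener, filter):
        self._orig = opener
        self._filter = filter

    def join(self, path):
        # the join() added by this changeset: filter the name, then delegate
        if path:
            return self._orig.join(self._filter(path))
        return self._orig.join(path)

Without such a definition, calling exists() or isdir() on a filtervfs would fail with an AttributeError, since abstractvfs itself provides no join().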
scmutil.py
@@ -1,933 +1,939 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding, phases
9 import util, error, osutil, revset, similar, encoding, phases
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, repo, excluded=None):
13 def nochangesfound(ui, repo, excluded=None):
14 '''Report no changes for push/pull, excluded is None or a list of
14 '''Report no changes for push/pull, excluded is None or a list of
15 nodes excluded from the push/pull.
15 nodes excluded from the push/pull.
16 '''
16 '''
17 secretlist = []
17 secretlist = []
18 if excluded:
18 if excluded:
19 for n in excluded:
19 for n in excluded:
20 ctx = repo[n]
20 ctx = repo[n]
21 if ctx.phase() >= phases.secret and not ctx.extinct():
21 if ctx.phase() >= phases.secret and not ctx.extinct():
22 secretlist.append(n)
22 secretlist.append(n)
23
23
24 if secretlist:
24 if secretlist:
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
26 % len(secretlist))
26 % len(secretlist))
27 else:
27 else:
28 ui.status(_("no changes found\n"))
28 ui.status(_("no changes found\n"))
29
29
30 def checkfilename(f):
30 def checkfilename(f):
31 '''Check that the filename f is an acceptable filename for a tracked file'''
31 '''Check that the filename f is an acceptable filename for a tracked file'''
32 if '\r' in f or '\n' in f:
32 if '\r' in f or '\n' in f:
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
33 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
34
34
35 def checkportable(ui, f):
35 def checkportable(ui, f):
36 '''Check if filename f is portable and warn or abort depending on config'''
36 '''Check if filename f is portable and warn or abort depending on config'''
37 checkfilename(f)
37 checkfilename(f)
38 abort, warn = checkportabilityalert(ui)
38 abort, warn = checkportabilityalert(ui)
39 if abort or warn:
39 if abort or warn:
40 msg = util.checkwinfilename(f)
40 msg = util.checkwinfilename(f)
41 if msg:
41 if msg:
42 msg = "%s: %r" % (msg, f)
42 msg = "%s: %r" % (msg, f)
43 if abort:
43 if abort:
44 raise util.Abort(msg)
44 raise util.Abort(msg)
45 ui.warn(_("warning: %s\n") % msg)
45 ui.warn(_("warning: %s\n") % msg)
46
46
47 def checkportabilityalert(ui):
47 def checkportabilityalert(ui):
48 '''check if the user's config requests nothing, a warning, or abort for
48 '''check if the user's config requests nothing, a warning, or abort for
49 non-portable filenames'''
49 non-portable filenames'''
50 val = ui.config('ui', 'portablefilenames', 'warn')
50 val = ui.config('ui', 'portablefilenames', 'warn')
51 lval = val.lower()
51 lval = val.lower()
52 bval = util.parsebool(val)
52 bval = util.parsebool(val)
53 abort = os.name == 'nt' or lval == 'abort'
53 abort = os.name == 'nt' or lval == 'abort'
54 warn = bval or lval == 'warn'
54 warn = bval or lval == 'warn'
55 if bval is None and not (warn or abort or lval == 'ignore'):
55 if bval is None and not (warn or abort or lval == 'ignore'):
56 raise error.ConfigError(
56 raise error.ConfigError(
57 _("ui.portablefilenames value is invalid ('%s')") % val)
57 _("ui.portablefilenames value is invalid ('%s')") % val)
58 return abort, warn
58 return abort, warn
59
59
60 class casecollisionauditor(object):
60 class casecollisionauditor(object):
61 def __init__(self, ui, abort, dirstate):
61 def __init__(self, ui, abort, dirstate):
62 self._ui = ui
62 self._ui = ui
63 self._abort = abort
63 self._abort = abort
64 allfiles = '\0'.join(dirstate._map)
64 allfiles = '\0'.join(dirstate._map)
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
65 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
66 self._dirstate = dirstate
66 self._dirstate = dirstate
67 # The purpose of _newfiles is so that we don't complain about
67 # The purpose of _newfiles is so that we don't complain about
68 # case collisions if someone were to call this object with the
68 # case collisions if someone were to call this object with the
69 # same filename twice.
69 # same filename twice.
70 self._newfiles = set()
70 self._newfiles = set()
71
71
72 def __call__(self, f):
72 def __call__(self, f):
73 fl = encoding.lower(f)
73 fl = encoding.lower(f)
74 if (fl in self._loweredfiles and f not in self._dirstate and
74 if (fl in self._loweredfiles and f not in self._dirstate and
75 f not in self._newfiles):
75 f not in self._newfiles):
76 msg = _('possible case-folding collision for %s') % f
76 msg = _('possible case-folding collision for %s') % f
77 if self._abort:
77 if self._abort:
78 raise util.Abort(msg)
78 raise util.Abort(msg)
79 self._ui.warn(_("warning: %s\n") % msg)
79 self._ui.warn(_("warning: %s\n") % msg)
80 self._loweredfiles.add(fl)
80 self._loweredfiles.add(fl)
81 self._newfiles.add(f)
81 self._newfiles.add(f)
82
82
83 class pathauditor(object):
83 class pathauditor(object):
84 '''ensure that a filesystem path contains no banned components.
84 '''ensure that a filesystem path contains no banned components.
85 the following properties of a path are checked:
85 the following properties of a path are checked:
86
86
87 - ends with a directory separator
87 - ends with a directory separator
88 - under top-level .hg
88 - under top-level .hg
89 - starts at the root of a windows drive
89 - starts at the root of a windows drive
90 - contains ".."
90 - contains ".."
91 - traverses a symlink (e.g. a/symlink_here/b)
91 - traverses a symlink (e.g. a/symlink_here/b)
92 - inside a nested repository (a callback can be used to approve
92 - inside a nested repository (a callback can be used to approve
93 some nested repositories, e.g., subrepositories)
93 some nested repositories, e.g., subrepositories)
94 '''
94 '''
95
95
96 def __init__(self, root, callback=None):
96 def __init__(self, root, callback=None):
97 self.audited = set()
97 self.audited = set()
98 self.auditeddir = set()
98 self.auditeddir = set()
99 self.root = root
99 self.root = root
100 self.callback = callback
100 self.callback = callback
101 if os.path.lexists(root) and not util.checkcase(root):
101 if os.path.lexists(root) and not util.checkcase(root):
102 self.normcase = util.normcase
102 self.normcase = util.normcase
103 else:
103 else:
104 self.normcase = lambda x: x
104 self.normcase = lambda x: x
105
105
106 def __call__(self, path):
106 def __call__(self, path):
107 '''Check the relative path.
107 '''Check the relative path.
108 path may contain a pattern (e.g. foodir/**.txt)'''
108 path may contain a pattern (e.g. foodir/**.txt)'''
109
109
110 path = util.localpath(path)
110 path = util.localpath(path)
111 normpath = self.normcase(path)
111 normpath = self.normcase(path)
112 if normpath in self.audited:
112 if normpath in self.audited:
113 return
113 return
114 # AIX ignores "/" at end of path, others raise EISDIR.
114 # AIX ignores "/" at end of path, others raise EISDIR.
115 if util.endswithsep(path):
115 if util.endswithsep(path):
116 raise util.Abort(_("path ends in directory separator: %s") % path)
116 raise util.Abort(_("path ends in directory separator: %s") % path)
117 parts = util.splitpath(path)
117 parts = util.splitpath(path)
118 if (os.path.splitdrive(path)[0]
118 if (os.path.splitdrive(path)[0]
119 or parts[0].lower() in ('.hg', '.hg.', '')
119 or parts[0].lower() in ('.hg', '.hg.', '')
120 or os.pardir in parts):
120 or os.pardir in parts):
121 raise util.Abort(_("path contains illegal component: %s") % path)
121 raise util.Abort(_("path contains illegal component: %s") % path)
122 if '.hg' in path.lower():
122 if '.hg' in path.lower():
123 lparts = [p.lower() for p in parts]
123 lparts = [p.lower() for p in parts]
124 for p in '.hg', '.hg.':
124 for p in '.hg', '.hg.':
125 if p in lparts[1:]:
125 if p in lparts[1:]:
126 pos = lparts.index(p)
126 pos = lparts.index(p)
127 base = os.path.join(*parts[:pos])
127 base = os.path.join(*parts[:pos])
128 raise util.Abort(_("path '%s' is inside nested repo %r")
128 raise util.Abort(_("path '%s' is inside nested repo %r")
129 % (path, base))
129 % (path, base))
130
130
131 normparts = util.splitpath(normpath)
131 normparts = util.splitpath(normpath)
132 assert len(parts) == len(normparts)
132 assert len(parts) == len(normparts)
133
133
134 parts.pop()
134 parts.pop()
135 normparts.pop()
135 normparts.pop()
136 prefixes = []
136 prefixes = []
137 while parts:
137 while parts:
138 prefix = os.sep.join(parts)
138 prefix = os.sep.join(parts)
139 normprefix = os.sep.join(normparts)
139 normprefix = os.sep.join(normparts)
140 if normprefix in self.auditeddir:
140 if normprefix in self.auditeddir:
141 break
141 break
142 curpath = os.path.join(self.root, prefix)
142 curpath = os.path.join(self.root, prefix)
143 try:
143 try:
144 st = os.lstat(curpath)
144 st = os.lstat(curpath)
145 except OSError, err:
145 except OSError, err:
146 # EINVAL can be raised as invalid path syntax under win32.
146 # EINVAL can be raised as invalid path syntax under win32.
147 # They must be ignored for patterns can be checked too.
147 # They must be ignored for patterns can be checked too.
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
148 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
149 raise
149 raise
150 else:
150 else:
151 if stat.S_ISLNK(st.st_mode):
151 if stat.S_ISLNK(st.st_mode):
152 raise util.Abort(
152 raise util.Abort(
153 _('path %r traverses symbolic link %r')
153 _('path %r traverses symbolic link %r')
154 % (path, prefix))
154 % (path, prefix))
155 elif (stat.S_ISDIR(st.st_mode) and
155 elif (stat.S_ISDIR(st.st_mode) and
156 os.path.isdir(os.path.join(curpath, '.hg'))):
156 os.path.isdir(os.path.join(curpath, '.hg'))):
157 if not self.callback or not self.callback(curpath):
157 if not self.callback or not self.callback(curpath):
158 raise util.Abort(_("path '%s' is inside nested "
158 raise util.Abort(_("path '%s' is inside nested "
159 "repo %r")
159 "repo %r")
160 % (path, prefix))
160 % (path, prefix))
161 prefixes.append(normprefix)
161 prefixes.append(normprefix)
162 parts.pop()
162 parts.pop()
163 normparts.pop()
163 normparts.pop()
164
164
165 self.audited.add(normpath)
165 self.audited.add(normpath)
166 # only add prefixes to the cache after checking everything: we don't
166 # only add prefixes to the cache after checking everything: we don't
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
167 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
168 self.auditeddir.update(prefixes)
168 self.auditeddir.update(prefixes)
169
169
170 class abstractvfs(object):
170 class abstractvfs(object):
171 """Abstract base class; cannot be instantiated"""
171 """Abstract base class; cannot be instantiated"""
172
172
173 def __init__(self, *args, **kwargs):
173 def __init__(self, *args, **kwargs):
174 '''Prevent instantiation; don't call this from subclasses.'''
174 '''Prevent instantiation; don't call this from subclasses.'''
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
175 raise NotImplementedError('attempted instantiating ' + str(type(self)))
176
176
177 def tryread(self, path):
177 def tryread(self, path):
178 '''gracefully return an empty string for missing files'''
178 '''gracefully return an empty string for missing files'''
179 try:
179 try:
180 return self.read(path)
180 return self.read(path)
181 except IOError, inst:
181 except IOError, inst:
182 if inst.errno != errno.ENOENT:
182 if inst.errno != errno.ENOENT:
183 raise
183 raise
184 return ""
184 return ""
185
185
186 def read(self, path):
186 def read(self, path):
187 fp = self(path, 'rb')
187 fp = self(path, 'rb')
188 try:
188 try:
189 return fp.read()
189 return fp.read()
190 finally:
190 finally:
191 fp.close()
191 fp.close()
192
192
193 def write(self, path, data):
193 def write(self, path, data):
194 fp = self(path, 'wb')
194 fp = self(path, 'wb')
195 try:
195 try:
196 return fp.write(data)
196 return fp.write(data)
197 finally:
197 finally:
198 fp.close()
198 fp.close()
199
199
200 def append(self, path, data):
200 def append(self, path, data):
201 fp = self(path, 'ab')
201 fp = self(path, 'ab')
202 try:
202 try:
203 return fp.write(data)
203 return fp.write(data)
204 finally:
204 finally:
205 fp.close()
205 fp.close()
206
206
207 def exists(self, path=None):
207 def exists(self, path=None):
208 return os.path.exists(self.join(path))
208 return os.path.exists(self.join(path))
209
209
210 def isdir(self, path=None):
210 def isdir(self, path=None):
211 return os.path.isdir(self.join(path))
211 return os.path.isdir(self.join(path))
212
212
213 def makedir(self, path=None, notindexed=True):
213 def makedir(self, path=None, notindexed=True):
214 return util.makedir(self.join(path), notindexed)
214 return util.makedir(self.join(path), notindexed)
215
215
216 def makedirs(self, path=None, mode=None):
216 def makedirs(self, path=None, mode=None):
217 return util.makedirs(self.join(path), mode)
217 return util.makedirs(self.join(path), mode)
218
218
219 def mkdir(self, path=None):
219 def mkdir(self, path=None):
220 return os.mkdir(self.join(path))
220 return os.mkdir(self.join(path))
221
221
222 class vfs(abstractvfs):
222 class vfs(abstractvfs):
223 '''Operate files relative to a base directory
223 '''Operate files relative to a base directory
224
224
225 This class is used to hide the details of COW semantics and
225 This class is used to hide the details of COW semantics and
226 remote file access from higher level code.
226 remote file access from higher level code.
227 '''
227 '''
228 def __init__(self, base, audit=True, expand=False):
228 def __init__(self, base, audit=True, expand=False):
229 if expand:
229 if expand:
230 base = os.path.realpath(util.expandpath(base))
230 base = os.path.realpath(util.expandpath(base))
231 self.base = base
231 self.base = base
232 self._setmustaudit(audit)
232 self._setmustaudit(audit)
233 self.createmode = None
233 self.createmode = None
234 self._trustnlink = None
234 self._trustnlink = None
235
235
236 def _getmustaudit(self):
236 def _getmustaudit(self):
237 return self._audit
237 return self._audit
238
238
239 def _setmustaudit(self, onoff):
239 def _setmustaudit(self, onoff):
240 self._audit = onoff
240 self._audit = onoff
241 if onoff:
241 if onoff:
242 self.auditor = pathauditor(self.base)
242 self.auditor = pathauditor(self.base)
243 else:
243 else:
244 self.auditor = util.always
244 self.auditor = util.always
245
245
246 mustaudit = property(_getmustaudit, _setmustaudit)
246 mustaudit = property(_getmustaudit, _setmustaudit)
247
247
248 @util.propertycache
248 @util.propertycache
249 def _cansymlink(self):
249 def _cansymlink(self):
250 return util.checklink(self.base)
250 return util.checklink(self.base)
251
251
252 def _fixfilemode(self, name):
252 def _fixfilemode(self, name):
253 if self.createmode is None:
253 if self.createmode is None:
254 return
254 return
255 os.chmod(name, self.createmode & 0666)
255 os.chmod(name, self.createmode & 0666)
256
256
257 def __call__(self, path, mode="r", text=False, atomictemp=False):
257 def __call__(self, path, mode="r", text=False, atomictemp=False):
258 if self._audit:
258 if self._audit:
259 r = util.checkosfilename(path)
259 r = util.checkosfilename(path)
260 if r:
260 if r:
261 raise util.Abort("%s: %r" % (r, path))
261 raise util.Abort("%s: %r" % (r, path))
262 self.auditor(path)
262 self.auditor(path)
263 f = self.join(path)
263 f = self.join(path)
264
264
265 if not text and "b" not in mode:
265 if not text and "b" not in mode:
266 mode += "b" # for that other OS
266 mode += "b" # for that other OS
267
267
268 nlink = -1
268 nlink = -1
269 dirname, basename = util.split(f)
269 dirname, basename = util.split(f)
270 # If basename is empty, then the path is malformed because it points
270 # If basename is empty, then the path is malformed because it points
271 # to a directory. Let the posixfile() call below raise IOError.
271 # to a directory. Let the posixfile() call below raise IOError.
272 if basename and mode not in ('r', 'rb'):
272 if basename and mode not in ('r', 'rb'):
273 if atomictemp:
273 if atomictemp:
274 if not os.path.isdir(dirname):
274 if not os.path.isdir(dirname):
275 util.makedirs(dirname, self.createmode)
275 util.makedirs(dirname, self.createmode)
276 return util.atomictempfile(f, mode, self.createmode)
276 return util.atomictempfile(f, mode, self.createmode)
277 try:
277 try:
278 if 'w' in mode:
278 if 'w' in mode:
279 util.unlink(f)
279 util.unlink(f)
280 nlink = 0
280 nlink = 0
281 else:
281 else:
282 # nlinks() may behave differently for files on Windows
282 # nlinks() may behave differently for files on Windows
283 # shares if the file is open.
283 # shares if the file is open.
284 fd = util.posixfile(f)
284 fd = util.posixfile(f)
285 nlink = util.nlinks(f)
285 nlink = util.nlinks(f)
286 if nlink < 1:
286 if nlink < 1:
287 nlink = 2 # force mktempcopy (issue1922)
287 nlink = 2 # force mktempcopy (issue1922)
288 fd.close()
288 fd.close()
289 except (OSError, IOError), e:
289 except (OSError, IOError), e:
290 if e.errno != errno.ENOENT:
290 if e.errno != errno.ENOENT:
291 raise
291 raise
292 nlink = 0
292 nlink = 0
293 if not os.path.isdir(dirname):
293 if not os.path.isdir(dirname):
294 util.makedirs(dirname, self.createmode)
294 util.makedirs(dirname, self.createmode)
295 if nlink > 0:
295 if nlink > 0:
296 if self._trustnlink is None:
296 if self._trustnlink is None:
297 self._trustnlink = nlink > 1 or util.checknlink(f)
297 self._trustnlink = nlink > 1 or util.checknlink(f)
298 if nlink > 1 or not self._trustnlink:
298 if nlink > 1 or not self._trustnlink:
299 util.rename(util.mktempcopy(f), f)
299 util.rename(util.mktempcopy(f), f)
300 fp = util.posixfile(f, mode)
300 fp = util.posixfile(f, mode)
301 if nlink == 0:
301 if nlink == 0:
302 self._fixfilemode(f)
302 self._fixfilemode(f)
303 return fp
303 return fp
304
304
305 def symlink(self, src, dst):
305 def symlink(self, src, dst):
306 self.auditor(dst)
306 self.auditor(dst)
307 linkname = self.join(dst)
307 linkname = self.join(dst)
308 try:
308 try:
309 os.unlink(linkname)
309 os.unlink(linkname)
310 except OSError:
310 except OSError:
311 pass
311 pass
312
312
313 dirname = os.path.dirname(linkname)
313 dirname = os.path.dirname(linkname)
314 if not os.path.exists(dirname):
314 if not os.path.exists(dirname):
315 util.makedirs(dirname, self.createmode)
315 util.makedirs(dirname, self.createmode)
316
316
317 if self._cansymlink:
317 if self._cansymlink:
318 try:
318 try:
319 os.symlink(src, linkname)
319 os.symlink(src, linkname)
320 except OSError, err:
320 except OSError, err:
321 raise OSError(err.errno, _('could not symlink to %r: %s') %
321 raise OSError(err.errno, _('could not symlink to %r: %s') %
322 (src, err.strerror), linkname)
322 (src, err.strerror), linkname)
323 else:
323 else:
324 f = self(dst, "w")
324 f = self(dst, "w")
325 f.write(src)
325 f.write(src)
326 f.close()
326 f.close()
327 self._fixfilemode(dst)
327 self._fixfilemode(dst)
328
328
329 def audit(self, path):
329 def audit(self, path):
330 self.auditor(path)
330 self.auditor(path)
331
331
332 def join(self, path):
332 def join(self, path):
333 if path:
333 if path:
334 return os.path.join(self.base, path)
334 return os.path.join(self.base, path)
335 else:
335 else:
336 return self.base
336 return self.base
337
337
338 opener = vfs
338 opener = vfs
339
339
340 class filtervfs(abstractvfs):
340 class filtervfs(abstractvfs):
341 '''Wrapper vfs for filtering filenames with a function.'''
341 '''Wrapper vfs for filtering filenames with a function.'''
342
342
343 def __init__(self, opener, filter):
343 def __init__(self, opener, filter):
344 self._filter = filter
344 self._filter = filter
345 self._orig = opener
345 self._orig = opener
346
346
347 def __call__(self, path, *args, **kwargs):
347 def __call__(self, path, *args, **kwargs):
348 return self._orig(self._filter(path), *args, **kwargs)
348 return self._orig(self._filter(path), *args, **kwargs)
349
349
350 def join(self, path):
351 if path:
352 return self._orig.join(self._filter(path))
353 else:
354 return self._orig.join(path)
355
350 filteropener = filtervfs
356 filteropener = filtervfs
351
357
352 def canonpath(root, cwd, myname, auditor=None):
358 def canonpath(root, cwd, myname, auditor=None):
353 '''return the canonical path of myname, given cwd and root'''
359 '''return the canonical path of myname, given cwd and root'''
354 if util.endswithsep(root):
360 if util.endswithsep(root):
355 rootsep = root
361 rootsep = root
356 else:
362 else:
357 rootsep = root + os.sep
363 rootsep = root + os.sep
358 name = myname
364 name = myname
359 if not os.path.isabs(name):
365 if not os.path.isabs(name):
360 name = os.path.join(root, cwd, name)
366 name = os.path.join(root, cwd, name)
361 name = os.path.normpath(name)
367 name = os.path.normpath(name)
362 if auditor is None:
368 if auditor is None:
363 auditor = pathauditor(root)
369 auditor = pathauditor(root)
364 if name != rootsep and name.startswith(rootsep):
370 if name != rootsep and name.startswith(rootsep):
365 name = name[len(rootsep):]
371 name = name[len(rootsep):]
366 auditor(name)
372 auditor(name)
367 return util.pconvert(name)
373 return util.pconvert(name)
368 elif name == root:
374 elif name == root:
369 return ''
375 return ''
370 else:
376 else:
371 # Determine whether `name' is in the hierarchy at or beneath `root',
377 # Determine whether `name' is in the hierarchy at or beneath `root',
372 # by iterating name=dirname(name) until that causes no change (can't
378 # by iterating name=dirname(name) until that causes no change (can't
373 # check name == '/', because that doesn't work on windows). The list
379 # check name == '/', because that doesn't work on windows). The list
374 # `rel' holds the reversed list of components making up the relative
380 # `rel' holds the reversed list of components making up the relative
375 # file name we want.
381 # file name we want.
376 rel = []
382 rel = []
377 while True:
383 while True:
378 try:
384 try:
379 s = util.samefile(name, root)
385 s = util.samefile(name, root)
380 except OSError:
386 except OSError:
381 s = False
387 s = False
382 if s:
388 if s:
383 if not rel:
389 if not rel:
384 # name was actually the same as root (maybe a symlink)
390 # name was actually the same as root (maybe a symlink)
385 return ''
391 return ''
386 rel.reverse()
392 rel.reverse()
387 name = os.path.join(*rel)
393 name = os.path.join(*rel)
388 auditor(name)
394 auditor(name)
389 return util.pconvert(name)
395 return util.pconvert(name)
390 dirname, basename = util.split(name)
396 dirname, basename = util.split(name)
391 rel.append(basename)
397 rel.append(basename)
392 if dirname == name:
398 if dirname == name:
393 break
399 break
394 name = dirname
400 name = dirname
395
401
396 raise util.Abort('%s not under root' % myname)
402 raise util.Abort('%s not under root' % myname)
397
403
398 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
404 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
399 '''yield every hg repository under path, always recursively.
405 '''yield every hg repository under path, always recursively.
400 The recurse flag will only control recursion into repo working dirs'''
406 The recurse flag will only control recursion into repo working dirs'''
401 def errhandler(err):
407 def errhandler(err):
402 if err.filename == path:
408 if err.filename == path:
403 raise err
409 raise err
404 samestat = getattr(os.path, 'samestat', None)
410 samestat = getattr(os.path, 'samestat', None)
405 if followsym and samestat is not None:
411 if followsym and samestat is not None:
406 def adddir(dirlst, dirname):
412 def adddir(dirlst, dirname):
407 match = False
413 match = False
408 dirstat = os.stat(dirname)
414 dirstat = os.stat(dirname)
409 for lstdirstat in dirlst:
415 for lstdirstat in dirlst:
410 if samestat(dirstat, lstdirstat):
416 if samestat(dirstat, lstdirstat):
411 match = True
417 match = True
412 break
418 break
413 if not match:
419 if not match:
414 dirlst.append(dirstat)
420 dirlst.append(dirstat)
415 return not match
421 return not match
416 else:
422 else:
417 followsym = False
423 followsym = False
418
424
419 if (seen_dirs is None) and followsym:
425 if (seen_dirs is None) and followsym:
420 seen_dirs = []
426 seen_dirs = []
421 adddir(seen_dirs, path)
427 adddir(seen_dirs, path)
422 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
428 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
423 dirs.sort()
429 dirs.sort()
424 if '.hg' in dirs:
430 if '.hg' in dirs:
425 yield root # found a repository
431 yield root # found a repository
426 qroot = os.path.join(root, '.hg', 'patches')
432 qroot = os.path.join(root, '.hg', 'patches')
427 if os.path.isdir(os.path.join(qroot, '.hg')):
433 if os.path.isdir(os.path.join(qroot, '.hg')):
428 yield qroot # we have a patch queue repo here
434 yield qroot # we have a patch queue repo here
429 if recurse:
435 if recurse:
430 # avoid recursing inside the .hg directory
436 # avoid recursing inside the .hg directory
431 dirs.remove('.hg')
437 dirs.remove('.hg')
432 else:
438 else:
433 dirs[:] = [] # don't descend further
439 dirs[:] = [] # don't descend further
434 elif followsym:
440 elif followsym:
435 newdirs = []
441 newdirs = []
436 for d in dirs:
442 for d in dirs:
437 fname = os.path.join(root, d)
443 fname = os.path.join(root, d)
438 if adddir(seen_dirs, fname):
444 if adddir(seen_dirs, fname):
439 if os.path.islink(fname):
445 if os.path.islink(fname):
440 for hgname in walkrepos(fname, True, seen_dirs):
446 for hgname in walkrepos(fname, True, seen_dirs):
441 yield hgname
447 yield hgname
442 else:
448 else:
443 newdirs.append(d)
449 newdirs.append(d)
444 dirs[:] = newdirs
450 dirs[:] = newdirs
445
451
446 def osrcpath():
452 def osrcpath():
447 '''return default os-specific hgrc search path'''
453 '''return default os-specific hgrc search path'''
448 path = systemrcpath()
454 path = systemrcpath()
449 path.extend(userrcpath())
455 path.extend(userrcpath())
450 path = [os.path.normpath(f) for f in path]
456 path = [os.path.normpath(f) for f in path]
451 return path
457 return path
452
458
453 _rcpath = None
459 _rcpath = None
454
460
455 def rcpath():
461 def rcpath():
456 '''return hgrc search path. if env var HGRCPATH is set, use it.
462 '''return hgrc search path. if env var HGRCPATH is set, use it.
457 for each item in path, if directory, use files ending in .rc,
463 for each item in path, if directory, use files ending in .rc,
458 else use item.
464 else use item.
459 make HGRCPATH empty to only look in .hg/hgrc of current repo.
465 make HGRCPATH empty to only look in .hg/hgrc of current repo.
460 if no HGRCPATH, use default os-specific path.'''
466 if no HGRCPATH, use default os-specific path.'''
461 global _rcpath
467 global _rcpath
462 if _rcpath is None:
468 if _rcpath is None:
463 if 'HGRCPATH' in os.environ:
469 if 'HGRCPATH' in os.environ:
464 _rcpath = []
470 _rcpath = []
465 for p in os.environ['HGRCPATH'].split(os.pathsep):
471 for p in os.environ['HGRCPATH'].split(os.pathsep):
466 if not p:
472 if not p:
467 continue
473 continue
468 p = util.expandpath(p)
474 p = util.expandpath(p)
469 if os.path.isdir(p):
475 if os.path.isdir(p):
470 for f, kind in osutil.listdir(p):
476 for f, kind in osutil.listdir(p):
471 if f.endswith('.rc'):
477 if f.endswith('.rc'):
472 _rcpath.append(os.path.join(p, f))
478 _rcpath.append(os.path.join(p, f))
473 else:
479 else:
474 _rcpath.append(p)
480 _rcpath.append(p)
475 else:
481 else:
476 _rcpath = osrcpath()
482 _rcpath = osrcpath()
477 return _rcpath
483 return _rcpath
478
484
479 if os.name != 'nt':
485 if os.name != 'nt':
480
486
481 def rcfiles(path):
487 def rcfiles(path):
482 rcs = [os.path.join(path, 'hgrc')]
488 rcs = [os.path.join(path, 'hgrc')]
483 rcdir = os.path.join(path, 'hgrc.d')
489 rcdir = os.path.join(path, 'hgrc.d')
484 try:
490 try:
485 rcs.extend([os.path.join(rcdir, f)
491 rcs.extend([os.path.join(rcdir, f)
486 for f, kind in osutil.listdir(rcdir)
492 for f, kind in osutil.listdir(rcdir)
487 if f.endswith(".rc")])
493 if f.endswith(".rc")])
488 except OSError:
494 except OSError:
489 pass
495 pass
490 return rcs
496 return rcs
491
497
492 def systemrcpath():
498 def systemrcpath():
493 path = []
499 path = []
494 if sys.platform == 'plan9':
500 if sys.platform == 'plan9':
495 root = 'lib/mercurial'
501 root = 'lib/mercurial'
496 else:
502 else:
497 root = 'etc/mercurial'
503 root = 'etc/mercurial'
498 # old mod_python does not set sys.argv
504 # old mod_python does not set sys.argv
499 if len(getattr(sys, 'argv', [])) > 0:
505 if len(getattr(sys, 'argv', [])) > 0:
500 p = os.path.dirname(os.path.dirname(sys.argv[0]))
506 p = os.path.dirname(os.path.dirname(sys.argv[0]))
501 path.extend(rcfiles(os.path.join(p, root)))
507 path.extend(rcfiles(os.path.join(p, root)))
502 path.extend(rcfiles('/' + root))
508 path.extend(rcfiles('/' + root))
503 return path
509 return path
504
510
505 def userrcpath():
511 def userrcpath():
506 if sys.platform == 'plan9':
512 if sys.platform == 'plan9':
507 return [os.environ['home'] + '/lib/hgrc']
513 return [os.environ['home'] + '/lib/hgrc']
508 else:
514 else:
509 return [os.path.expanduser('~/.hgrc')]
515 return [os.path.expanduser('~/.hgrc')]
510
516
511 else:
517 else:
512
518
513 import _winreg
519 import _winreg
514
520
515 def systemrcpath():
521 def systemrcpath():
516 '''return default os-specific hgrc search path'''
522 '''return default os-specific hgrc search path'''
517 rcpath = []
523 rcpath = []
518 filename = util.executablepath()
524 filename = util.executablepath()
519 # Use mercurial.ini found in directory with hg.exe
525 # Use mercurial.ini found in directory with hg.exe
520 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
526 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
521 if os.path.isfile(progrc):
527 if os.path.isfile(progrc):
522 rcpath.append(progrc)
528 rcpath.append(progrc)
523 return rcpath
529 return rcpath
524 # Use hgrc.d found in directory with hg.exe
530 # Use hgrc.d found in directory with hg.exe
525 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
531 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
526 if os.path.isdir(progrcd):
532 if os.path.isdir(progrcd):
527 for f, kind in osutil.listdir(progrcd):
533 for f, kind in osutil.listdir(progrcd):
528 if f.endswith('.rc'):
534 if f.endswith('.rc'):
529 rcpath.append(os.path.join(progrcd, f))
535 rcpath.append(os.path.join(progrcd, f))
530 return rcpath
536 return rcpath
531 # else look for a system rcpath in the registry
537 # else look for a system rcpath in the registry
532 value = util.lookupreg('SOFTWARE\\Mercurial', None,
538 value = util.lookupreg('SOFTWARE\\Mercurial', None,
533 _winreg.HKEY_LOCAL_MACHINE)
539 _winreg.HKEY_LOCAL_MACHINE)
534 if not isinstance(value, str) or not value:
540 if not isinstance(value, str) or not value:
535 return rcpath
541 return rcpath
536 value = util.localpath(value)
542 value = util.localpath(value)
537 for p in value.split(os.pathsep):
543 for p in value.split(os.pathsep):
538 if p.lower().endswith('mercurial.ini'):
544 if p.lower().endswith('mercurial.ini'):
539 rcpath.append(p)
545 rcpath.append(p)
540 elif os.path.isdir(p):
546 elif os.path.isdir(p):
541 for f, kind in osutil.listdir(p):
547 for f, kind in osutil.listdir(p):
542 if f.endswith('.rc'):
548 if f.endswith('.rc'):
543 rcpath.append(os.path.join(p, f))
549 rcpath.append(os.path.join(p, f))
544 return rcpath
550 return rcpath
545
551
546 def userrcpath():
552 def userrcpath():
547 '''return os-specific hgrc search path to the user dir'''
553 '''return os-specific hgrc search path to the user dir'''
548 home = os.path.expanduser('~')
554 home = os.path.expanduser('~')
549 path = [os.path.join(home, 'mercurial.ini'),
555 path = [os.path.join(home, 'mercurial.ini'),
550 os.path.join(home, '.hgrc')]
556 os.path.join(home, '.hgrc')]
551 userprofile = os.environ.get('USERPROFILE')
557 userprofile = os.environ.get('USERPROFILE')
552 if userprofile:
558 if userprofile:
553 path.append(os.path.join(userprofile, 'mercurial.ini'))
559 path.append(os.path.join(userprofile, 'mercurial.ini'))
554 path.append(os.path.join(userprofile, '.hgrc'))
560 path.append(os.path.join(userprofile, '.hgrc'))
555 return path
561 return path
556
562
557 def revsingle(repo, revspec, default='.'):
563 def revsingle(repo, revspec, default='.'):
558 if not revspec:
564 if not revspec:
559 return repo[default]
565 return repo[default]
560
566
561 l = revrange(repo, [revspec])
567 l = revrange(repo, [revspec])
562 if len(l) < 1:
568 if len(l) < 1:
563 raise util.Abort(_('empty revision set'))
569 raise util.Abort(_('empty revision set'))
564 return repo[l[-1]]
570 return repo[l[-1]]
565
571
566 def revpair(repo, revs):
572 def revpair(repo, revs):
567 if not revs:
573 if not revs:
568 return repo.dirstate.p1(), None
574 return repo.dirstate.p1(), None
569
575
570 l = revrange(repo, revs)
576 l = revrange(repo, revs)
571
577
572 if len(l) == 0:
578 if len(l) == 0:
573 if revs:
579 if revs:
574 raise util.Abort(_('empty revision range'))
580 raise util.Abort(_('empty revision range'))
575 return repo.dirstate.p1(), None
581 return repo.dirstate.p1(), None
576
582
577 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
583 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
578 return repo.lookup(l[0]), None
584 return repo.lookup(l[0]), None
579
585
580 return repo.lookup(l[0]), repo.lookup(l[-1])
586 return repo.lookup(l[0]), repo.lookup(l[-1])
581
587
582 _revrangesep = ':'
588 _revrangesep = ':'
583
589
584 def revrange(repo, revs):
590 def revrange(repo, revs):
585 """Yield revision as strings from a list of revision specifications."""
591 """Yield revision as strings from a list of revision specifications."""
586
592
587 def revfix(repo, val, defval):
593 def revfix(repo, val, defval):
588 if not val and val != 0 and defval is not None:
594 if not val and val != 0 and defval is not None:
589 return defval
595 return defval
590 return repo[val].rev()
596 return repo[val].rev()
591
597
592 seen, l = set(), []
598 seen, l = set(), []
593 for spec in revs:
599 for spec in revs:
594 if l and not seen:
600 if l and not seen:
595 seen = set(l)
601 seen = set(l)
596 # attempt to parse old-style ranges first to deal with
602 # attempt to parse old-style ranges first to deal with
597 # things like old-tag which contain query metacharacters
603 # things like old-tag which contain query metacharacters
598 try:
604 try:
599 if isinstance(spec, int):
605 if isinstance(spec, int):
600 seen.add(spec)
606 seen.add(spec)
601 l.append(spec)
607 l.append(spec)
602 continue
608 continue
603
609
604 if _revrangesep in spec:
610 if _revrangesep in spec:
605 start, end = spec.split(_revrangesep, 1)
611 start, end = spec.split(_revrangesep, 1)
606 start = revfix(repo, start, 0)
612 start = revfix(repo, start, 0)
607 end = revfix(repo, end, len(repo) - 1)
613 end = revfix(repo, end, len(repo) - 1)
608 step = start > end and -1 or 1
614 step = start > end and -1 or 1
609 if not seen and not l:
615 if not seen and not l:
610 # by far the most common case: revs = ["-1:0"]
616 # by far the most common case: revs = ["-1:0"]
611 l = range(start, end + step, step)
617 l = range(start, end + step, step)
612 # defer syncing seen until next iteration
618 # defer syncing seen until next iteration
613 continue
619 continue
614 newrevs = set(xrange(start, end + step, step))
620 newrevs = set(xrange(start, end + step, step))
615 if seen:
621 if seen:
616 newrevs.difference_update(seen)
622 newrevs.difference_update(seen)
617 seen.update(newrevs)
623 seen.update(newrevs)
618 else:
624 else:
619 seen = newrevs
625 seen = newrevs
620 l.extend(sorted(newrevs, reverse=start > end))
626 l.extend(sorted(newrevs, reverse=start > end))
621 continue
627 continue
622 elif spec and spec in repo: # single unquoted rev
628 elif spec and spec in repo: # single unquoted rev
623 rev = revfix(repo, spec, None)
629 rev = revfix(repo, spec, None)
624 if rev in seen:
630 if rev in seen:
625 continue
631 continue
626 seen.add(rev)
632 seen.add(rev)
627 l.append(rev)
633 l.append(rev)
628 continue
634 continue
629 except error.RepoLookupError:
635 except error.RepoLookupError:
630 pass
636 pass
631
637
632 # fall through to new-style queries if old-style fails
638 # fall through to new-style queries if old-style fails
633 m = revset.match(repo.ui, spec)
639 m = revset.match(repo.ui, spec)
634 dl = [r for r in m(repo, list(repo)) if r not in seen]
640 dl = [r for r in m(repo, list(repo)) if r not in seen]
635 l.extend(dl)
641 l.extend(dl)
636 seen.update(dl)
642 seen.update(dl)
637
643
638 return l
644 return l
639
645
640 def expandpats(pats):
646 def expandpats(pats):
641 if not util.expandglobs:
647 if not util.expandglobs:
642 return list(pats)
648 return list(pats)
643 ret = []
649 ret = []
644 for p in pats:
650 for p in pats:
645 kind, name = matchmod._patsplit(p, None)
651 kind, name = matchmod._patsplit(p, None)
646 if kind is None:
652 if kind is None:
647 try:
653 try:
648 globbed = glob.glob(name)
654 globbed = glob.glob(name)
649 except re.error:
655 except re.error:
650 globbed = [name]
656 globbed = [name]
651 if globbed:
657 if globbed:
652 ret.extend(globbed)
658 ret.extend(globbed)
653 continue
659 continue
654 ret.append(p)
660 ret.append(p)
655 return ret
661 return ret
656
662
657 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
663 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
658 if pats == ("",):
664 if pats == ("",):
659 pats = []
665 pats = []
660 if not globbed and default == 'relpath':
666 if not globbed and default == 'relpath':
661 pats = expandpats(pats or [])
667 pats = expandpats(pats or [])
662
668
663 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
669 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
664 default)
670 default)
665 def badfn(f, msg):
671 def badfn(f, msg):
666 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
672 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
667 m.bad = badfn
673 m.bad = badfn
668 return m, pats
674 return m, pats
669
675
670 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
676 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
671 return matchandpats(ctx, pats, opts, globbed, default)[0]
677 return matchandpats(ctx, pats, opts, globbed, default)[0]
672
678
673 def matchall(repo):
679 def matchall(repo):
674 return matchmod.always(repo.root, repo.getcwd())
680 return matchmod.always(repo.root, repo.getcwd())
675
681
676 def matchfiles(repo, files):
682 def matchfiles(repo, files):
677 return matchmod.exact(repo.root, repo.getcwd(), files)
683 return matchmod.exact(repo.root, repo.getcwd(), files)
678
684
679 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
685 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
680 if dry_run is None:
686 if dry_run is None:
681 dry_run = opts.get('dry_run')
687 dry_run = opts.get('dry_run')
682 if similarity is None:
688 if similarity is None:
683 similarity = float(opts.get('similarity') or 0)
689 similarity = float(opts.get('similarity') or 0)
684 # we'd use status here, except handling of symlinks and ignore is tricky
690 # we'd use status here, except handling of symlinks and ignore is tricky
685 added, unknown, deleted, removed = [], [], [], []
691 added, unknown, deleted, removed = [], [], [], []
686 audit_path = pathauditor(repo.root)
692 audit_path = pathauditor(repo.root)
687 m = match(repo[None], pats, opts)
693 m = match(repo[None], pats, opts)
688 rejected = []
694 rejected = []
689 m.bad = lambda x, y: rejected.append(x)
695 m.bad = lambda x, y: rejected.append(x)
690
696
691 for abs in repo.walk(m):
697 for abs in repo.walk(m):
692 target = repo.wjoin(abs)
698 target = repo.wjoin(abs)
693 good = True
699 good = True
694 try:
700 try:
695 audit_path(abs)
701 audit_path(abs)
696 except (OSError, util.Abort):
702 except (OSError, util.Abort):
697 good = False
703 good = False
698 rel = m.rel(abs)
704 rel = m.rel(abs)
699 exact = m.exact(abs)
705 exact = m.exact(abs)
700 if good and abs not in repo.dirstate:
706 if good and abs not in repo.dirstate:
701 unknown.append(abs)
707 unknown.append(abs)
702 if repo.ui.verbose or not exact:
708 if repo.ui.verbose or not exact:
703 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
709 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
704 elif (repo.dirstate[abs] != 'r' and
710 elif (repo.dirstate[abs] != 'r' and
705 (not good or not os.path.lexists(target) or
711 (not good or not os.path.lexists(target) or
706 (os.path.isdir(target) and not os.path.islink(target)))):
712 (os.path.isdir(target) and not os.path.islink(target)))):
707 deleted.append(abs)
713 deleted.append(abs)
708 if repo.ui.verbose or not exact:
714 if repo.ui.verbose or not exact:
709 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
715 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
710 # for finding renames
716 # for finding renames
711 elif repo.dirstate[abs] == 'r':
717 elif repo.dirstate[abs] == 'r':
712 removed.append(abs)
718 removed.append(abs)
713 elif repo.dirstate[abs] == 'a':
719 elif repo.dirstate[abs] == 'a':
714 added.append(abs)
720 added.append(abs)
715 copies = {}
721 copies = {}
716 if similarity > 0:
722 if similarity > 0:
717 for old, new, score in similar.findrenames(repo,
723 for old, new, score in similar.findrenames(repo,
718 added + unknown, removed + deleted, similarity):
724 added + unknown, removed + deleted, similarity):
719 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
725 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
720 repo.ui.status(_('recording removal of %s as rename to %s '
726 repo.ui.status(_('recording removal of %s as rename to %s '
721 '(%d%% similar)\n') %
727 '(%d%% similar)\n') %
722 (m.rel(old), m.rel(new), score * 100))
728 (m.rel(old), m.rel(new), score * 100))
723 copies[new] = old
729 copies[new] = old
724
730
725 if not dry_run:
731 if not dry_run:
726 wctx = repo[None]
732 wctx = repo[None]
727 wlock = repo.wlock()
733 wlock = repo.wlock()
728 try:
734 try:
729 wctx.forget(deleted)
735 wctx.forget(deleted)
730 wctx.add(unknown)
736 wctx.add(unknown)
731 for new, old in copies.iteritems():
737 for new, old in copies.iteritems():
732 wctx.copy(old, new)
738 wctx.copy(old, new)
733 finally:
739 finally:
734 wlock.release()
740 wlock.release()
735
741
736 for f in rejected:
742 for f in rejected:
737 if f in m.files():
743 if f in m.files():
738 return 1
744 return 1
739 return 0
745 return 0
740
746
741 def updatedir(ui, repo, patches, similarity=0):
747 def updatedir(ui, repo, patches, similarity=0):
742 '''Update dirstate after patch application according to metadata'''
748 '''Update dirstate after patch application according to metadata'''
743 if not patches:
749 if not patches:
744 return []
750 return []
745 copies = []
751 copies = []
746 removes = set()
752 removes = set()
747 cfiles = patches.keys()
753 cfiles = patches.keys()
748 cwd = repo.getcwd()
754 cwd = repo.getcwd()
749 if cwd:
755 if cwd:
750 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
756 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
751 for f in patches:
757 for f in patches:
752 gp = patches[f]
758 gp = patches[f]
753 if not gp:
759 if not gp:
754 continue
760 continue
755 if gp.op == 'RENAME':
761 if gp.op == 'RENAME':
756 copies.append((gp.oldpath, gp.path))
762 copies.append((gp.oldpath, gp.path))
757 removes.add(gp.oldpath)
763 removes.add(gp.oldpath)
758 elif gp.op == 'COPY':
764 elif gp.op == 'COPY':
759 copies.append((gp.oldpath, gp.path))
765 copies.append((gp.oldpath, gp.path))
760 elif gp.op == 'DELETE':
766 elif gp.op == 'DELETE':
761 removes.add(gp.path)
767 removes.add(gp.path)
762
768
763 wctx = repo[None]
769 wctx = repo[None]
764 for src, dst in copies:
770 for src, dst in copies:
765 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
771 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
766 if (not similarity) and removes:
772 if (not similarity) and removes:
767 wctx.remove(sorted(removes), True)
773 wctx.remove(sorted(removes), True)
768
774
769 for f in patches:
775 for f in patches:
770 gp = patches[f]
776 gp = patches[f]
771 if gp and gp.mode:
777 if gp and gp.mode:
772 islink, isexec = gp.mode
778 islink, isexec = gp.mode
773 dst = repo.wjoin(gp.path)
779 dst = repo.wjoin(gp.path)
774 # patch won't create empty files
780 # patch won't create empty files
775 if gp.op == 'ADD' and not os.path.lexists(dst):
781 if gp.op == 'ADD' and not os.path.lexists(dst):
776 flags = (isexec and 'x' or '') + (islink and 'l' or '')
782 flags = (isexec and 'x' or '') + (islink and 'l' or '')
777 repo.wwrite(gp.path, '', flags)
783 repo.wwrite(gp.path, '', flags)
778 util.setflags(dst, islink, isexec)
784 util.setflags(dst, islink, isexec)
779 addremove(repo, cfiles, similarity=similarity)
785 addremove(repo, cfiles, similarity=similarity)
780 files = patches.keys()
786 files = patches.keys()
781 files.extend([r for r in removes if r not in files])
787 files.extend([r for r in removes if r not in files])
782 return sorted(files)
788 return sorted(files)
783
789
784 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
790 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
785 """Update the dirstate to reflect the intent of copying src to dst. For
791 """Update the dirstate to reflect the intent of copying src to dst. For
786 different reasons it might not end with dst being marked as copied from src.
792 different reasons it might not end with dst being marked as copied from src.
787 """
793 """
788 origsrc = repo.dirstate.copied(src) or src
794 origsrc = repo.dirstate.copied(src) or src
789 if dst == origsrc: # copying back a copy?
795 if dst == origsrc: # copying back a copy?
790 if repo.dirstate[dst] not in 'mn' and not dryrun:
796 if repo.dirstate[dst] not in 'mn' and not dryrun:
791 repo.dirstate.normallookup(dst)
797 repo.dirstate.normallookup(dst)
792 else:
798 else:
793 if repo.dirstate[origsrc] == 'a' and origsrc == src:
799 if repo.dirstate[origsrc] == 'a' and origsrc == src:
794 if not ui.quiet:
800 if not ui.quiet:
795 ui.warn(_("%s has not been committed yet, so no copy "
801 ui.warn(_("%s has not been committed yet, so no copy "
796 "data will be stored for %s.\n")
802 "data will be stored for %s.\n")
797 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
803 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
798 if repo.dirstate[dst] in '?r' and not dryrun:
804 if repo.dirstate[dst] in '?r' and not dryrun:
799 wctx.add([dst])
805 wctx.add([dst])
800 elif not dryrun:
806 elif not dryrun:
801 wctx.copy(origsrc, dst)
807 wctx.copy(origsrc, dst)
802
808
803 def readrequires(opener, supported):
809 def readrequires(opener, supported):
804 '''Reads and parses .hg/requires and checks if all entries found
810 '''Reads and parses .hg/requires and checks if all entries found
805 are in the list of supported features.'''
811 are in the list of supported features.'''
806 requirements = set(opener.read("requires").splitlines())
812 requirements = set(opener.read("requires").splitlines())
807 missings = []
813 missings = []
808 for r in requirements:
814 for r in requirements:
809 if r not in supported:
815 if r not in supported:
810 if not r or not r[0].isalnum():
816 if not r or not r[0].isalnum():
811 raise error.RequirementError(_(".hg/requires file is corrupt"))
817 raise error.RequirementError(_(".hg/requires file is corrupt"))
812 missings.append(r)
818 missings.append(r)
813 missings.sort()
819 missings.sort()
814 if missings:
820 if missings:
815 raise error.RequirementError(
821 raise error.RequirementError(
816 _("unknown repository format: requires features '%s' (upgrade "
822 _("unknown repository format: requires features '%s' (upgrade "
817 "Mercurial)") % "', '".join(missings))
823 "Mercurial)") % "', '".join(missings))
818 return requirements
824 return requirements
819
825
820 class filecacheentry(object):
826 class filecacheentry(object):
821 def __init__(self, path):
827 def __init__(self, path):
822 self.path = path
828 self.path = path
823 self.cachestat = filecacheentry.stat(self.path)
829 self.cachestat = filecacheentry.stat(self.path)
824
830
825 if self.cachestat:
831 if self.cachestat:
826 self._cacheable = self.cachestat.cacheable()
832 self._cacheable = self.cachestat.cacheable()
827 else:
833 else:
828 # None means we don't know yet
834 # None means we don't know yet
829 self._cacheable = None
835 self._cacheable = None
830
836
831 def refresh(self):
837 def refresh(self):
832 if self.cacheable():
838 if self.cacheable():
833 self.cachestat = filecacheentry.stat(self.path)
839 self.cachestat = filecacheentry.stat(self.path)
834
840
835 def cacheable(self):
841 def cacheable(self):
836 if self._cacheable is not None:
842 if self._cacheable is not None:
837 return self._cacheable
843 return self._cacheable
838
844
839 # we don't know yet, assume it is for now
845 # we don't know yet, assume it is for now
840 return True
846 return True
841
847
842 def changed(self):
848 def changed(self):
843 # no point in going further if we can't cache it
849 # no point in going further if we can't cache it
844 if not self.cacheable():
850 if not self.cacheable():
845 return True
851 return True
846
852
847 newstat = filecacheentry.stat(self.path)
853 newstat = filecacheentry.stat(self.path)
848
854
849 # we may not know if it's cacheable yet, check again now
855 # we may not know if it's cacheable yet, check again now
850 if newstat and self._cacheable is None:
856 if newstat and self._cacheable is None:
851 self._cacheable = newstat.cacheable()
857 self._cacheable = newstat.cacheable()
852
858
853 # check again
859 # check again
854 if not self._cacheable:
860 if not self._cacheable:
855 return True
861 return True
856
862
857 if self.cachestat != newstat:
863 if self.cachestat != newstat:
858 self.cachestat = newstat
864 self.cachestat = newstat
859 return True
865 return True
860 else:
866 else:
861 return False
867 return False
862
868
863 @staticmethod
869 @staticmethod
864 def stat(path):
870 def stat(path):
865 try:
871 try:
866 return util.cachestat(path)
872 return util.cachestat(path)
867 except OSError, e:
873 except OSError, e:
868 if e.errno != errno.ENOENT:
874 if e.errno != errno.ENOENT:
869 raise
875 raise
870
876
871 class filecache(object):
877 class filecache(object):
872 '''A property like decorator that tracks a file under .hg/ for updates.
878 '''A property like decorator that tracks a file under .hg/ for updates.
873
879
874 Records stat info when called in _filecache.
880 Records stat info when called in _filecache.
875
881
876 On subsequent calls, compares old stat info with new info, and recreates
882 On subsequent calls, compares old stat info with new info, and recreates
877 the object when needed, updating the new stat info in _filecache.
883 the object when needed, updating the new stat info in _filecache.
878
884
879 Mercurial either atomic renames or appends for files under .hg,
885 Mercurial either atomic renames or appends for files under .hg,
880 so to ensure the cache is reliable we need the filesystem to be able
886 so to ensure the cache is reliable we need the filesystem to be able
881 to tell us if a file has been replaced. If it can't, we fallback to
887 to tell us if a file has been replaced. If it can't, we fallback to
882 recreating the object on every call (essentially the same behaviour as
888 recreating the object on every call (essentially the same behaviour as
883 propertycache).'''
889 propertycache).'''
884 def __init__(self, path):
890 def __init__(self, path):
885 self.path = path
891 self.path = path
886
892
887 def join(self, obj, fname):
893 def join(self, obj, fname):
888 """Used to compute the runtime path of the cached file.
894 """Used to compute the runtime path of the cached file.
889
895
890 Users should subclass filecache and provide their own version of this
896 Users should subclass filecache and provide their own version of this
891 function to call the appropriate join function on 'obj' (an instance
897 function to call the appropriate join function on 'obj' (an instance
892 of the class that its member function was decorated).
898 of the class that its member function was decorated).
893 """
899 """
894 return obj.join(fname)
900 return obj.join(fname)
895
901
896 def __call__(self, func):
902 def __call__(self, func):
897 self.func = func
903 self.func = func
898 self.name = func.__name__
904 self.name = func.__name__
899 return self
905 return self
900
906
901 def __get__(self, obj, type=None):
907 def __get__(self, obj, type=None):
902 # do we need to check if the file changed?
908 # do we need to check if the file changed?
903 if self.name in obj.__dict__:
909 if self.name in obj.__dict__:
904 return obj.__dict__[self.name]
910 return obj.__dict__[self.name]
905
911
906 entry = obj._filecache.get(self.name)
912 entry = obj._filecache.get(self.name)
907
913
908 if entry:
914 if entry:
909 if entry.changed():
915 if entry.changed():
910 entry.obj = self.func(obj)
916 entry.obj = self.func(obj)
911 else:
917 else:
912 path = self.join(obj, self.path)
918 path = self.join(obj, self.path)
913
919
914 # We stat -before- creating the object so our cache doesn't lie if
920 # We stat -before- creating the object so our cache doesn't lie if
915 # a writer modified between the time we read and stat
921 # a writer modified between the time we read and stat
916 entry = filecacheentry(path)
922 entry = filecacheentry(path)
917 entry.obj = self.func(obj)
923 entry.obj = self.func(obj)
918
924
919 obj._filecache[self.name] = entry
925 obj._filecache[self.name] = entry
920
926
921 obj.__dict__[self.name] = entry.obj
927 obj.__dict__[self.name] = entry.obj
922 return entry.obj
928 return entry.obj
923
929
924 def __set__(self, obj, value):
930 def __set__(self, obj, value):
925 if self.name in obj._filecache:
931 if self.name in obj._filecache:
926 obj._filecache[self.name].obj = value # update cached copy
932 obj._filecache[self.name].obj = value # update cached copy
927 obj.__dict__[self.name] = value # update copy returned by obj.x
933 obj.__dict__[self.name] = value # update copy returned by obj.x
928
934
929 def __delete__(self, obj):
935 def __delete__(self, obj):
930 try:
936 try:
931 del obj.__dict__[self.name]
937 del obj.__dict__[self.name]
932 except KeyError:
938 except KeyError:
933 raise AttributeError, self.name
939 raise AttributeError, self.name
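A hedged usage sketch of the scmutil classes from the hunk above follows; the directory, file names, and upper-casing filter are invented for the example, and it assumes a Mercurial of this vintage is importable as the mercurial package.

# Hypothetical usage of scmutil.vfs and scmutil.filtervfs as shown above.
# The paths and the upper-casing filter are assumptions for the example.
from mercurial import scmutil

v = scmutil.vfs('/tmp/vfsdemo', audit=False)    # concrete vfs rooted at a directory
v.makedirs('data')                              # util.makedirs(v.join('data'), mode)
v.write('data/f.txt', 'payload')                # opened via __call__(path, 'wb')

fv = scmutil.filtervfs(v, lambda p: p.upper())  # rewrites names before delegating
fv.write('note', 'hello')                       # actually writes /tmp/vfsdemo/NOTE
assert fv.join('note') == v.join('NOTE')        # the join() added by this changeset
assert fv.exists('note')                        # exists() works because join() exists

audit=False only keeps the example self-contained; with auditing enabled, every path is checked by the pathauditor class shown earlier.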
statichttprepo.py
@@ -1,154 +1,160 @@
1 # statichttprepo.py - simple http repository class for mercurial
1 # statichttprepo.py - simple http repository class for mercurial
2 #
2 #
3 # This provides read-only repo access to repositories exported via static http
3 # This provides read-only repo access to repositories exported via static http
4 #
4 #
5 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from i18n import _
10 from i18n import _
11 import changelog, byterange, url, error
11 import changelog, byterange, url, error
12 import localrepo, manifest, util, scmutil, store
12 import localrepo, manifest, util, scmutil, store
13 import urllib, urllib2, errno
13 import urllib, urllib2, errno
14
14
15 class httprangereader(object):
15 class httprangereader(object):
16 def __init__(self, url, opener):
16 def __init__(self, url, opener):
17 # we assume opener has HTTPRangeHandler
17 # we assume opener has HTTPRangeHandler
18 self.url = url
18 self.url = url
19 self.pos = 0
19 self.pos = 0
20 self.opener = opener
20 self.opener = opener
21 self.name = url
21 self.name = url
22 def seek(self, pos):
22 def seek(self, pos):
23 self.pos = pos
23 self.pos = pos
24 def read(self, bytes=None):
24 def read(self, bytes=None):
25 req = urllib2.Request(self.url)
25 req = urllib2.Request(self.url)
26 end = ''
26 end = ''
27 if bytes:
27 if bytes:
28 end = self.pos + bytes - 1
28 end = self.pos + bytes - 1
29 if self.pos or end:
29 if self.pos or end:
30 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
30 req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
31
31
32 try:
32 try:
33 f = self.opener.open(req)
33 f = self.opener.open(req)
34 data = f.read()
34 data = f.read()
35 # Python 2.6+ defines a getcode() function, and 2.4 and
35 # Python 2.6+ defines a getcode() function, and 2.4 and
36 # 2.5 appear to always have an undocumented code attribute
36 # 2.5 appear to always have an undocumented code attribute
37 # set. If we can't read either of those, fall back to 206
37 # set. If we can't read either of those, fall back to 206
38 # and hope for the best.
38 # and hope for the best.
39 code = getattr(f, 'getcode', lambda : getattr(f, 'code', 206))()
39 code = getattr(f, 'getcode', lambda : getattr(f, 'code', 206))()
40 except urllib2.HTTPError, inst:
40 except urllib2.HTTPError, inst:
41 num = inst.code == 404 and errno.ENOENT or None
41 num = inst.code == 404 and errno.ENOENT or None
42 raise IOError(num, inst)
42 raise IOError(num, inst)
43 except urllib2.URLError, inst:
43 except urllib2.URLError, inst:
44 raise IOError(None, inst.reason[1])
44 raise IOError(None, inst.reason[1])
45
45
46 if code == 200:
46 if code == 200:
47 # HTTPRangeHandler does nothing if remote does not support
47 # HTTPRangeHandler does nothing if remote does not support
48 # Range headers and returns the full entity. Let's slice it.
48 # Range headers and returns the full entity. Let's slice it.
49 if bytes:
49 if bytes:
50 data = data[self.pos:self.pos + bytes]
50 data = data[self.pos:self.pos + bytes]
51 else:
51 else:
52 data = data[self.pos:]
52 data = data[self.pos:]
53 elif bytes:
53 elif bytes:
54 data = data[:bytes]
54 data = data[:bytes]
55 self.pos += len(data)
55 self.pos += len(data)
56 return data
56 return data
57 def __iter__(self):
57 def __iter__(self):
58 return iter(self.read().splitlines(1))
58 return iter(self.read().splitlines(1))
59 def close(self):
59 def close(self):
60 pass
60 pass
61
61
62 def build_opener(ui, authinfo):
62 def build_opener(ui, authinfo):
63 # urllib cannot handle URLs with embedded user or passwd
63 # urllib cannot handle URLs with embedded user or passwd
64 urlopener = url.opener(ui, authinfo)
64 urlopener = url.opener(ui, authinfo)
65 urlopener.add_handler(byterange.HTTPRangeHandler())
65 urlopener.add_handler(byterange.HTTPRangeHandler())
66
66
67 class statichttpvfs(scmutil.abstractvfs):
67 class statichttpvfs(scmutil.abstractvfs):
68 def __init__(self, base):
68 def __init__(self, base):
69 self.base = base
69 self.base = base
70
70
71 def __call__(self, path, mode="r", atomictemp=None):
71 def __call__(self, path, mode="r", atomictemp=None):
72 if mode not in ('r', 'rb'):
72 if mode not in ('r', 'rb'):
73 raise IOError('Permission denied')
73 raise IOError('Permission denied')
74 f = "/".join((self.base, urllib.quote(path)))
74 f = "/".join((self.base, urllib.quote(path)))
75 return httprangereader(f, urlopener)
75 return httprangereader(f, urlopener)
76
76
77 def join(self, path):
78 if path:
79 return os.path.join(self.base, path)
80 else:
81 return self.base
82
77 return statichttpvfs
83 return statichttpvfs
78
84
79 class statichttppeer(localrepo.localpeer):
85 class statichttppeer(localrepo.localpeer):
80 def local(self):
86 def local(self):
81 return None
87 return None
82 def canpush(self):
88 def canpush(self):
83 return False
89 return False
84
90
85 class statichttprepository(localrepo.localrepository):
91 class statichttprepository(localrepo.localrepository):
86 def __init__(self, ui, path):
92 def __init__(self, ui, path):
87 self._url = path
93 self._url = path
88 self.ui = ui
94 self.ui = ui
89
95
90 self.root = path
96 self.root = path
91 u = util.url(path.rstrip('/') + "/.hg")
97 u = util.url(path.rstrip('/') + "/.hg")
92 self.path, authinfo = u.authinfo()
98 self.path, authinfo = u.authinfo()
93
99
94 opener = build_opener(ui, authinfo)
100 opener = build_opener(ui, authinfo)
95 self.opener = opener(self.path)
101 self.opener = opener(self.path)
96 self.vfs = self.opener
102 self.vfs = self.opener
97 self._phasedefaults = []
103 self._phasedefaults = []
98
104
99 try:
105 try:
100 requirements = scmutil.readrequires(self.opener, self.supported)
106 requirements = scmutil.readrequires(self.opener, self.supported)
101 except IOError, inst:
107 except IOError, inst:
102 if inst.errno != errno.ENOENT:
108 if inst.errno != errno.ENOENT:
103 raise
109 raise
104 requirements = set()
110 requirements = set()
105
111
106 # check if it is a non-empty old-style repository
112 # check if it is a non-empty old-style repository
107 try:
113 try:
108 fp = self.opener("00changelog.i")
114 fp = self.opener("00changelog.i")
109 fp.read(1)
115 fp.read(1)
110 fp.close()
116 fp.close()
111 except IOError, inst:
117 except IOError, inst:
112 if inst.errno != errno.ENOENT:
118 if inst.errno != errno.ENOENT:
113 raise
119 raise
114 # we do not care about empty old-style repositories here
120 # we do not care about empty old-style repositories here
115 msg = _("'%s' does not appear to be an hg repository") % path
121 msg = _("'%s' does not appear to be an hg repository") % path
116 raise error.RepoError(msg)
122 raise error.RepoError(msg)
117
123
118 # setup store
124 # setup store
119 self.store = store.store(requirements, self.path, opener)
125 self.store = store.store(requirements, self.path, opener)
120 self.spath = self.store.path
126 self.spath = self.store.path
121 self.sopener = self.store.opener
127 self.sopener = self.store.opener
122 self.svfs = self.sopener
128 self.svfs = self.sopener
123 self.sjoin = self.store.join
129 self.sjoin = self.store.join
124 self._filecache = {}
130 self._filecache = {}
125 self.requirements = requirements
131 self.requirements = requirements
126
132
127 self.manifest = manifest.manifest(self.sopener)
133 self.manifest = manifest.manifest(self.sopener)
128 self.changelog = changelog.changelog(self.sopener)
134 self.changelog = changelog.changelog(self.sopener)
129 self._tags = None
135 self._tags = None
130 self.nodetagscache = None
136 self.nodetagscache = None
131 self._branchcache = None
137 self._branchcache = None
132 self._branchcachetip = None
138 self._branchcachetip = None
133 self.encodepats = None
139 self.encodepats = None
134 self.decodepats = None
140 self.decodepats = None
135
141
136 def _restrictcapabilities(self, caps):
142 def _restrictcapabilities(self, caps):
137 return caps.difference(["pushkey"])
143 return caps.difference(["pushkey"])
138
144
139 def url(self):
145 def url(self):
140 return self._url
146 return self._url
141
147
142 def local(self):
148 def local(self):
143 return False
149 return False
144
150
145 def peer(self):
151 def peer(self):
146 return statichttppeer(self)
152 return statichttppeer(self)
147
153
148 def lock(self, wait=True):
154 def lock(self, wait=True):
149 raise util.Abort(_('cannot lock static-http repository'))
155 raise util.Abort(_('cannot lock static-http repository'))
150
156
151 def instance(ui, path, create):
157 def instance(ui, path, create):
152 if create:
158 if create:
153 raise util.Abort(_('cannot create new static-http repository'))
159 raise util.Abort(_('cannot create new static-http repository'))
154 return statichttprepository(ui, path[7:])
160 return statichttprepository(ui, path[7:])
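
A quick illustration of the join() just added to statichttpvfs above: it returns base unchanged when no path is given, otherwise it defers to os.path.join(). The snippet mimics only that two-line method; _vfs is a stand-in class and the printed separators assume POSIX.

import os

class _vfs(object):                      # stand-in for statichttpvfs
    def __init__(self, base):
        self.base = base

    def join(self, path):
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base

v = _vfs('/srv/repo/.hg')
print(v.join('requires'))   # /srv/repo/.hg/requires
print(v.join(None))         # /srv/repo/.hg  (no path: just the base)
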
@@ -1,499 +1,505 b''
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, scmutil, util, parsers
9 import osutil, scmutil, util, parsers
10 import os, stat, errno
10 import os, stat, errno
11
11
12 _sha = util.sha1
12 _sha = util.sha1
13
13
14 # This avoids a collision between a file named foo and a dir named
14 # This avoids a collision between a file named foo and a dir named
15 # foo.i or foo.d
15 # foo.i or foo.d
16 def _encodedir(path):
16 def _encodedir(path):
17 '''
17 '''
18 >>> _encodedir('data/foo.i')
18 >>> _encodedir('data/foo.i')
19 'data/foo.i'
19 'data/foo.i'
20 >>> _encodedir('data/foo.i/bla.i')
20 >>> _encodedir('data/foo.i/bla.i')
21 'data/foo.i.hg/bla.i'
21 'data/foo.i.hg/bla.i'
22 >>> _encodedir('data/foo.i.hg/bla.i')
22 >>> _encodedir('data/foo.i.hg/bla.i')
23 'data/foo.i.hg.hg/bla.i'
23 'data/foo.i.hg.hg/bla.i'
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
24 >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n')
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
25 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n'
26 '''
26 '''
27 return (path
27 return (path
28 .replace(".hg/", ".hg.hg/")
28 .replace(".hg/", ".hg.hg/")
29 .replace(".i/", ".i.hg/")
29 .replace(".i/", ".i.hg/")
30 .replace(".d/", ".d.hg/"))
30 .replace(".d/", ".d.hg/"))
31
31
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
32 encodedir = getattr(parsers, 'encodedir', _encodedir)
33
33
34 def decodedir(path):
34 def decodedir(path):
35 '''
35 '''
36 >>> decodedir('data/foo.i')
36 >>> decodedir('data/foo.i')
37 'data/foo.i'
37 'data/foo.i'
38 >>> decodedir('data/foo.i.hg/bla.i')
38 >>> decodedir('data/foo.i.hg/bla.i')
39 'data/foo.i/bla.i'
39 'data/foo.i/bla.i'
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
40 >>> decodedir('data/foo.i.hg.hg/bla.i')
41 'data/foo.i.hg/bla.i'
41 'data/foo.i.hg/bla.i'
42 '''
42 '''
43 if ".hg/" not in path:
43 if ".hg/" not in path:
44 return path
44 return path
45 return (path
45 return (path
46 .replace(".d.hg/", ".d/")
46 .replace(".d.hg/", ".d/")
47 .replace(".i.hg/", ".i/")
47 .replace(".i.hg/", ".i/")
48 .replace(".hg.hg/", ".hg/"))
48 .replace(".hg.hg/", ".hg/"))
49
49
50 def _buildencodefun():
50 def _buildencodefun():
51 '''
51 '''
52 >>> enc, dec = _buildencodefun()
52 >>> enc, dec = _buildencodefun()
53
53
54 >>> enc('nothing/special.txt')
54 >>> enc('nothing/special.txt')
55 'nothing/special.txt'
55 'nothing/special.txt'
56 >>> dec('nothing/special.txt')
56 >>> dec('nothing/special.txt')
57 'nothing/special.txt'
57 'nothing/special.txt'
58
58
59 >>> enc('HELLO')
59 >>> enc('HELLO')
60 '_h_e_l_l_o'
60 '_h_e_l_l_o'
61 >>> dec('_h_e_l_l_o')
61 >>> dec('_h_e_l_l_o')
62 'HELLO'
62 'HELLO'
63
63
64 >>> enc('hello:world?')
64 >>> enc('hello:world?')
65 'hello~3aworld~3f'
65 'hello~3aworld~3f'
66 >>> dec('hello~3aworld~3f')
66 >>> dec('hello~3aworld~3f')
67 'hello:world?'
67 'hello:world?'
68
68
69 >>> enc('the\x07quick\xADshot')
69 >>> enc('the\x07quick\xADshot')
70 'the~07quick~adshot'
70 'the~07quick~adshot'
71 >>> dec('the~07quick~adshot')
71 >>> dec('the~07quick~adshot')
72 'the\\x07quick\\xadshot'
72 'the\\x07quick\\xadshot'
73 '''
73 '''
74 e = '_'
74 e = '_'
75 winreserved = [ord(x) for x in '\\:*?"<>|']
75 winreserved = [ord(x) for x in '\\:*?"<>|']
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
76 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
77 for x in (range(32) + range(126, 256) + winreserved):
77 for x in (range(32) + range(126, 256) + winreserved):
78 cmap[chr(x)] = "~%02x" % x
78 cmap[chr(x)] = "~%02x" % x
79 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
79 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
80 cmap[chr(x)] = e + chr(x).lower()
80 cmap[chr(x)] = e + chr(x).lower()
81 dmap = {}
81 dmap = {}
82 for k, v in cmap.iteritems():
82 for k, v in cmap.iteritems():
83 dmap[v] = k
83 dmap[v] = k
84 def decode(s):
84 def decode(s):
85 i = 0
85 i = 0
86 while i < len(s):
86 while i < len(s):
87 for l in xrange(1, 4):
87 for l in xrange(1, 4):
88 try:
88 try:
89 yield dmap[s[i:i + l]]
89 yield dmap[s[i:i + l]]
90 i += l
90 i += l
91 break
91 break
92 except KeyError:
92 except KeyError:
93 pass
93 pass
94 else:
94 else:
95 raise KeyError
95 raise KeyError
96 return (lambda s: ''.join([cmap[c] for c in s]),
96 return (lambda s: ''.join([cmap[c] for c in s]),
97 lambda s: ''.join(list(decode(s))))
97 lambda s: ''.join(list(decode(s))))
98
98
99 _encodefname, _decodefname = _buildencodefun()
99 _encodefname, _decodefname = _buildencodefun()
100
100
101 def encodefilename(s):
101 def encodefilename(s):
102 '''
102 '''
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
103 >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO')
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
104 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o'
105 '''
105 '''
106 return _encodefname(encodedir(s))
106 return _encodefname(encodedir(s))
107
107
108 def decodefilename(s):
108 def decodefilename(s):
109 '''
109 '''
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
110 >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o')
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
111 'foo.i/bar.d/bla.hg/hi:world?/HELLO'
112 '''
112 '''
113 return decodedir(_decodefname(s))
113 return decodedir(_decodefname(s))
114
114
115 def _buildlowerencodefun():
115 def _buildlowerencodefun():
116 '''
116 '''
117 >>> f = _buildlowerencodefun()
117 >>> f = _buildlowerencodefun()
118 >>> f('nothing/special.txt')
118 >>> f('nothing/special.txt')
119 'nothing/special.txt'
119 'nothing/special.txt'
120 >>> f('HELLO')
120 >>> f('HELLO')
121 'hello'
121 'hello'
122 >>> f('hello:world?')
122 >>> f('hello:world?')
123 'hello~3aworld~3f'
123 'hello~3aworld~3f'
124 >>> f('the\x07quick\xADshot')
124 >>> f('the\x07quick\xADshot')
125 'the~07quick~adshot'
125 'the~07quick~adshot'
126 '''
126 '''
127 winreserved = [ord(x) for x in '\\:*?"<>|']
127 winreserved = [ord(x) for x in '\\:*?"<>|']
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
128 cmap = dict([(chr(x), chr(x)) for x in xrange(127)])
129 for x in (range(32) + range(126, 256) + winreserved):
129 for x in (range(32) + range(126, 256) + winreserved):
130 cmap[chr(x)] = "~%02x" % x
130 cmap[chr(x)] = "~%02x" % x
131 for x in range(ord("A"), ord("Z")+1):
131 for x in range(ord("A"), ord("Z")+1):
132 cmap[chr(x)] = chr(x).lower()
132 cmap[chr(x)] = chr(x).lower()
133 return lambda s: "".join([cmap[c] for c in s])
133 return lambda s: "".join([cmap[c] for c in s])
134
134
135 lowerencode = _buildlowerencodefun()
135 lowerencode = _buildlowerencodefun()
136
136
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
137 # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
138 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
139 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9)
140 def _auxencode(path, dotencode):
140 def _auxencode(path, dotencode):
141 '''
141 '''
142 Encodes filenames containing names reserved by Windows or which end in
142 Encodes filenames containing names reserved by Windows or which end in
143 period or space. Does not touch other single reserved characters c.
143 period or space. Does not touch other single reserved characters c.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
144 Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here.
145 Additionally encodes space or period at the beginning, if dotencode is
145 Additionally encodes space or period at the beginning, if dotencode is
146 True. Parameter path is assumed to be all lowercase.
146 True. Parameter path is assumed to be all lowercase.
147 A segment only needs encoding if a reserved name appears as a
147 A segment only needs encoding if a reserved name appears as a
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
148 basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux"
149 doesn't need encoding.
149 doesn't need encoding.
150
150
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
151 >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.'
152 >>> _auxencode(s.split('/'), True)
152 >>> _auxencode(s.split('/'), True)
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
153 ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e']
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
154 >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.'
155 >>> _auxencode(s.split('/'), False)
155 >>> _auxencode(s.split('/'), False)
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
156 ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e']
157 >>> _auxencode(['foo. '], True)
157 >>> _auxencode(['foo. '], True)
158 ['foo.~20']
158 ['foo.~20']
159 >>> _auxencode([' .foo'], True)
159 >>> _auxencode([' .foo'], True)
160 ['~20.foo']
160 ['~20.foo']
161 '''
161 '''
162 for i, n in enumerate(path):
162 for i, n in enumerate(path):
163 if not n:
163 if not n:
164 continue
164 continue
165 if dotencode and n[0] in '. ':
165 if dotencode and n[0] in '. ':
166 n = "~%02x" % ord(n[0]) + n[1:]
166 n = "~%02x" % ord(n[0]) + n[1:]
167 path[i] = n
167 path[i] = n
168 else:
168 else:
169 l = n.find('.')
169 l = n.find('.')
170 if l == -1:
170 if l == -1:
171 l = len(n)
171 l = len(n)
172 if ((l == 3 and n[:3] in _winres3) or
172 if ((l == 3 and n[:3] in _winres3) or
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
173 (l == 4 and n[3] <= '9' and n[3] >= '1'
174 and n[:3] in _winres4)):
174 and n[:3] in _winres4)):
175 # encode third letter ('aux' -> 'au~78')
175 # encode third letter ('aux' -> 'au~78')
176 ec = "~%02x" % ord(n[2])
176 ec = "~%02x" % ord(n[2])
177 n = n[0:2] + ec + n[3:]
177 n = n[0:2] + ec + n[3:]
178 path[i] = n
178 path[i] = n
179 if n[-1] in '. ':
179 if n[-1] in '. ':
180 # encode last period or space ('foo...' -> 'foo..~2e')
180 # encode last period or space ('foo...' -> 'foo..~2e')
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
181 path[i] = n[:-1] + "~%02x" % ord(n[-1])
182 return path
182 return path
183
183
184 _maxstorepathlen = 120
184 _maxstorepathlen = 120
185 _dirprefixlen = 8
185 _dirprefixlen = 8
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
186 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4
187
187
188 def _hashencode(path, dotencode):
188 def _hashencode(path, dotencode):
189 digest = _sha(path).hexdigest()
189 digest = _sha(path).hexdigest()
190 le = lowerencode(path).split('/')[1:]
190 le = lowerencode(path).split('/')[1:]
191 parts = _auxencode(le, dotencode)
191 parts = _auxencode(le, dotencode)
192 basename = parts[-1]
192 basename = parts[-1]
193 _root, ext = os.path.splitext(basename)
193 _root, ext = os.path.splitext(basename)
194 sdirs = []
194 sdirs = []
195 sdirslen = 0
195 sdirslen = 0
196 for p in parts[:-1]:
196 for p in parts[:-1]:
197 d = p[:_dirprefixlen]
197 d = p[:_dirprefixlen]
198 if d[-1] in '. ':
198 if d[-1] in '. ':
199 # Windows can't access dirs ending in period or space
199 # Windows can't access dirs ending in period or space
200 d = d[:-1] + '_'
200 d = d[:-1] + '_'
201 if sdirslen == 0:
201 if sdirslen == 0:
202 t = len(d)
202 t = len(d)
203 else:
203 else:
204 t = sdirslen + 1 + len(d)
204 t = sdirslen + 1 + len(d)
205 if t > _maxshortdirslen:
205 if t > _maxshortdirslen:
206 break
206 break
207 sdirs.append(d)
207 sdirs.append(d)
208 sdirslen = t
208 sdirslen = t
209 dirs = '/'.join(sdirs)
209 dirs = '/'.join(sdirs)
210 if len(dirs) > 0:
210 if len(dirs) > 0:
211 dirs += '/'
211 dirs += '/'
212 res = 'dh/' + dirs + digest + ext
212 res = 'dh/' + dirs + digest + ext
213 spaceleft = _maxstorepathlen - len(res)
213 spaceleft = _maxstorepathlen - len(res)
214 if spaceleft > 0:
214 if spaceleft > 0:
215 filler = basename[:spaceleft]
215 filler = basename[:spaceleft]
216 res = 'dh/' + dirs + filler + digest + ext
216 res = 'dh/' + dirs + filler + digest + ext
217 return res
217 return res
218
218
219 def _hybridencode(path, dotencode):
219 def _hybridencode(path, dotencode):
220 '''encodes path with a length limit
220 '''encodes path with a length limit
221
221
222 Encodes all paths that begin with 'data/', according to the following.
222 Encodes all paths that begin with 'data/', according to the following.
223
223
224 Default encoding (reversible):
224 Default encoding (reversible):
225
225
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
226 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
227 characters are encoded as '~xx', where xx is the two digit hex code
227 characters are encoded as '~xx', where xx is the two digit hex code
228 of the character (see encodefilename).
228 of the character (see encodefilename).
229 Relevant path components consisting of Windows reserved filenames are
229 Relevant path components consisting of Windows reserved filenames are
230 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
230 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
231
231
232 Hashed encoding (not reversible):
232 Hashed encoding (not reversible):
233
233
234 If the default-encoded path is longer than _maxstorepathlen, a
234 If the default-encoded path is longer than _maxstorepathlen, a
235 non-reversible hybrid hashing of the path is done instead.
235 non-reversible hybrid hashing of the path is done instead.
236 This encoding uses up to _dirprefixlen characters of all directory
236 This encoding uses up to _dirprefixlen characters of all directory
237 levels of the lowerencoded path, but not more levels than can fit into
237 levels of the lowerencoded path, but not more levels than can fit into
238 _maxshortdirslen.
238 _maxshortdirslen.
239 Then follows the filler followed by the sha digest of the full path.
239 Then follows the filler followed by the sha digest of the full path.
240 The filler is the beginning of the basename of the lowerencoded path
240 The filler is the beginning of the basename of the lowerencoded path
241 (the basename is everything after the last path separator). The filler
241 (the basename is everything after the last path separator). The filler
242 is as long as possible, filling in characters from the basename until
242 is as long as possible, filling in characters from the basename until
243 the encoded path has _maxstorepathlen characters (or all chars of the
243 the encoded path has _maxstorepathlen characters (or all chars of the
244 basename have been taken).
244 basename have been taken).
245 The extension (e.g. '.i' or '.d') is preserved.
245 The extension (e.g. '.i' or '.d') is preserved.
246
246
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
247 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
248 encoding was used.
248 encoding was used.
249 '''
249 '''
250 path = encodedir(path)
250 path = encodedir(path)
251 ef = _encodefname(path).split('/')
251 ef = _encodefname(path).split('/')
252 res = '/'.join(_auxencode(ef, dotencode))
252 res = '/'.join(_auxencode(ef, dotencode))
253 if len(res) > _maxstorepathlen:
253 if len(res) > _maxstorepathlen:
254 res = _hashencode(path, dotencode)
254 res = _hashencode(path, dotencode)
255 return res
255 return res
256
256
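
The docstring above describes the two encodings; the only decision _hybridencode itself makes at the end is a length check on the default-encoded result. A rough sketch of that decision follows (for plain lowercase names the default encoding leaves the length unchanged; the hashed 'dh/...' form is not reproduced because it depends on the SHA-1 digest).

_maxstorepathlen = 120     # same limit as defined earlier in this file

def which_branch(encoded_path):
    # Mirrors only the final length check of _hybridencode above.
    if len(encoded_path) > _maxstorepathlen:
        return 'hashed, non-reversible (dh/... + digest)'
    return 'default, reversible encoding'

print(which_branch('data/some/ordinary/file.i'))
print(which_branch('data/' + 'x' * 200 + '.i'))
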
257 def _pathencode(path):
257 def _pathencode(path):
258 if len(path) > _maxstorepathlen:
258 if len(path) > _maxstorepathlen:
259 return None
259 return None
260 ef = _encodefname(encodedir(path)).split('/')
260 ef = _encodefname(encodedir(path)).split('/')
261 res = '/'.join(_auxencode(ef, True))
261 res = '/'.join(_auxencode(ef, True))
262 if len(res) > _maxstorepathlen:
262 if len(res) > _maxstorepathlen:
263 return None
263 return None
264 return res
264 return res
265
265
266 _pathencode = getattr(parsers, 'pathencode', _pathencode)
266 _pathencode = getattr(parsers, 'pathencode', _pathencode)
267
267
268 def _dothybridencode(f):
268 def _dothybridencode(f):
269 ef = _pathencode(f)
269 ef = _pathencode(f)
270 if ef is None:
270 if ef is None:
271 return _hashencode(encodedir(f), True)
271 return _hashencode(encodedir(f), True)
272 return ef
272 return ef
273
273
274 def _plainhybridencode(f):
274 def _plainhybridencode(f):
275 return _hybridencode(f, False)
275 return _hybridencode(f, False)
276
276
277 def _calcmode(path):
277 def _calcmode(path):
278 try:
278 try:
279 # files in .hg/ will be created using this mode
279 # files in .hg/ will be created using this mode
280 mode = os.stat(path).st_mode
280 mode = os.stat(path).st_mode
281 # avoid some useless chmods
281 # avoid some useless chmods
282 if (0777 & ~util.umask) == (0777 & mode):
282 if (0777 & ~util.umask) == (0777 & mode):
283 mode = None
283 mode = None
284 except OSError:
284 except OSError:
285 mode = None
285 mode = None
286 return mode
286 return mode
287
287
288 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
288 _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
289 ' phaseroots obsstore')
289 ' phaseroots obsstore')
290
290
291 class basicstore(object):
291 class basicstore(object):
292 '''base class for local repository stores'''
292 '''base class for local repository stores'''
293 def __init__(self, path, vfstype):
293 def __init__(self, path, vfstype):
294 vfs = vfstype(path)
294 vfs = vfstype(path)
295 self.path = vfs.base
295 self.path = vfs.base
296 self.createmode = _calcmode(path)
296 self.createmode = _calcmode(path)
297 vfs.createmode = self.createmode
297 vfs.createmode = self.createmode
298 self.vfs = scmutil.filtervfs(vfs, encodedir)
298 self.vfs = scmutil.filtervfs(vfs, encodedir)
299 self.opener = self.vfs
299 self.opener = self.vfs
300
300
301 def join(self, f):
301 def join(self, f):
302 return self.path + '/' + encodedir(f)
302 return self.path + '/' + encodedir(f)
303
303
304 def _walk(self, relpath, recurse):
304 def _walk(self, relpath, recurse):
305 '''yields (unencoded, encoded, size)'''
305 '''yields (unencoded, encoded, size)'''
306 path = self.path
306 path = self.path
307 if relpath:
307 if relpath:
308 path += '/' + relpath
308 path += '/' + relpath
309 striplen = len(self.path) + 1
309 striplen = len(self.path) + 1
310 l = []
310 l = []
311 if os.path.isdir(path):
311 if os.path.isdir(path):
312 visit = [path]
312 visit = [path]
313 while visit:
313 while visit:
314 p = visit.pop()
314 p = visit.pop()
315 for f, kind, st in osutil.listdir(p, stat=True):
315 for f, kind, st in osutil.listdir(p, stat=True):
316 fp = p + '/' + f
316 fp = p + '/' + f
317 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
317 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
318 n = util.pconvert(fp[striplen:])
318 n = util.pconvert(fp[striplen:])
319 l.append((decodedir(n), n, st.st_size))
319 l.append((decodedir(n), n, st.st_size))
320 elif kind == stat.S_IFDIR and recurse:
320 elif kind == stat.S_IFDIR and recurse:
321 visit.append(fp)
321 visit.append(fp)
322 l.sort()
322 l.sort()
323 return l
323 return l
324
324
325 def datafiles(self):
325 def datafiles(self):
326 return self._walk('data', True)
326 return self._walk('data', True)
327
327
328 def walk(self):
328 def walk(self):
329 '''yields (unencoded, encoded, size)'''
329 '''yields (unencoded, encoded, size)'''
330 # yield data files first
330 # yield data files first
331 for x in self.datafiles():
331 for x in self.datafiles():
332 yield x
332 yield x
333 # yield manifest before changelog
333 # yield manifest before changelog
334 for x in reversed(self._walk('', False)):
334 for x in reversed(self._walk('', False)):
335 yield x
335 yield x
336
336
337 def copylist(self):
337 def copylist(self):
338 return ['requires'] + _data.split()
338 return ['requires'] + _data.split()
339
339
340 def write(self):
340 def write(self):
341 pass
341 pass
342
342
343 class encodedstore(basicstore):
343 class encodedstore(basicstore):
344 def __init__(self, path, vfstype):
344 def __init__(self, path, vfstype):
345 vfs = vfstype(path + '/store')
345 vfs = vfstype(path + '/store')
346 self.path = vfs.base
346 self.path = vfs.base
347 self.createmode = _calcmode(self.path)
347 self.createmode = _calcmode(self.path)
348 vfs.createmode = self.createmode
348 vfs.createmode = self.createmode
349 self.vfs = scmutil.filtervfs(vfs, encodefilename)
349 self.vfs = scmutil.filtervfs(vfs, encodefilename)
350 self.opener = self.vfs
350 self.opener = self.vfs
351
351
352 def datafiles(self):
352 def datafiles(self):
353 for a, b, size in self._walk('data', True):
353 for a, b, size in self._walk('data', True):
354 try:
354 try:
355 a = decodefilename(a)
355 a = decodefilename(a)
356 except KeyError:
356 except KeyError:
357 a = None
357 a = None
358 yield a, b, size
358 yield a, b, size
359
359
360 def join(self, f):
360 def join(self, f):
361 return self.path + '/' + encodefilename(f)
361 return self.path + '/' + encodefilename(f)
362
362
363 def copylist(self):
363 def copylist(self):
364 return (['requires', '00changelog.i'] +
364 return (['requires', '00changelog.i'] +
365 ['store/' + f for f in _data.split()])
365 ['store/' + f for f in _data.split()])
366
366
367 class fncache(object):
367 class fncache(object):
368 # the filename used to be partially encoded
368 # the filename used to be partially encoded
369 # hence the encodedir/decodedir dance
369 # hence the encodedir/decodedir dance
370 def __init__(self, vfs):
370 def __init__(self, vfs):
371 self.vfs = vfs
371 self.vfs = vfs
372 self.entries = None
372 self.entries = None
373 self._dirty = False
373 self._dirty = False
374
374
375 def _load(self):
375 def _load(self):
376 '''fill the entries from the fncache file'''
376 '''fill the entries from the fncache file'''
377 self._dirty = False
377 self._dirty = False
378 try:
378 try:
379 fp = self.vfs('fncache', mode='rb')
379 fp = self.vfs('fncache', mode='rb')
380 except IOError:
380 except IOError:
381 # skip nonexistent file
381 # skip nonexistent file
382 self.entries = set()
382 self.entries = set()
383 return
383 return
384 self.entries = set(decodedir(fp.read()).splitlines())
384 self.entries = set(decodedir(fp.read()).splitlines())
385 if '' in self.entries:
385 if '' in self.entries:
386 fp.seek(0)
386 fp.seek(0)
387 for n, line in enumerate(fp):
387 for n, line in enumerate(fp):
388 if not line.rstrip('\n'):
388 if not line.rstrip('\n'):
389 t = _('invalid entry in fncache, line %s') % (n + 1)
389 t = _('invalid entry in fncache, line %s') % (n + 1)
390 raise util.Abort(t)
390 raise util.Abort(t)
391 fp.close()
391 fp.close()
392
392
393 def _write(self, files, atomictemp):
393 def _write(self, files, atomictemp):
394 fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
394 fp = self.vfs('fncache', mode='wb', atomictemp=atomictemp)
395 if files:
395 if files:
396 fp.write(encodedir('\n'.join(files) + '\n'))
396 fp.write(encodedir('\n'.join(files) + '\n'))
397 fp.close()
397 fp.close()
398 self._dirty = False
398 self._dirty = False
399
399
400 def rewrite(self, files):
400 def rewrite(self, files):
401 self._write(files, False)
401 self._write(files, False)
402 self.entries = set(files)
402 self.entries = set(files)
403
403
404 def write(self):
404 def write(self):
405 if self._dirty:
405 if self._dirty:
406 self._write(self.entries, True)
406 self._write(self.entries, True)
407
407
408 def add(self, fn):
408 def add(self, fn):
409 if self.entries is None:
409 if self.entries is None:
410 self._load()
410 self._load()
411 if fn not in self.entries:
411 if fn not in self.entries:
412 self._dirty = True
412 self._dirty = True
413 self.entries.add(fn)
413 self.entries.add(fn)
414
414
415 def __contains__(self, fn):
415 def __contains__(self, fn):
416 if self.entries is None:
416 if self.entries is None:
417 self._load()
417 self._load()
418 return fn in self.entries
418 return fn in self.entries
419
419
420 def __iter__(self):
420 def __iter__(self):
421 if self.entries is None:
421 if self.entries is None:
422 self._load()
422 self._load()
423 return iter(self.entries)
423 return iter(self.entries)
424
424
425 class _fncachevfs(scmutil.abstractvfs):
425 class _fncachevfs(scmutil.abstractvfs):
426 def __init__(self, vfs, fnc, encode):
426 def __init__(self, vfs, fnc, encode):
427 self.vfs = vfs
427 self.vfs = vfs
428 self.fncache = fnc
428 self.fncache = fnc
429 self.encode = encode
429 self.encode = encode
430
430
431 def _getmustaudit(self):
431 def _getmustaudit(self):
432 return self.vfs.mustaudit
432 return self.vfs.mustaudit
433
433
434 def _setmustaudit(self, onoff):
434 def _setmustaudit(self, onoff):
435 self.vfs.mustaudit = onoff
435 self.vfs.mustaudit = onoff
436
436
437 mustaudit = property(_getmustaudit, _setmustaudit)
437 mustaudit = property(_getmustaudit, _setmustaudit)
438
438
439 def __call__(self, path, mode='r', *args, **kw):
439 def __call__(self, path, mode='r', *args, **kw):
440 if mode not in ('r', 'rb') and path.startswith('data/'):
440 if mode not in ('r', 'rb') and path.startswith('data/'):
441 self.fncache.add(path)
441 self.fncache.add(path)
442 return self.vfs(self.encode(path), mode, *args, **kw)
442 return self.vfs(self.encode(path), mode, *args, **kw)
443
443
444 def join(self, path):
445 if path:
446 return self.vfs.join(self.encode(path))
447 else:
448 return self.vfs.join(path)
449
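
The join() added to _fncachevfs above composes the store's filename encoding with the underlying vfs's own join(). A small sketch of that composition with stand-in pieces: _basevfs, _fncvfs and the toy encode() are illustrative only; the real encoder is _dothybridencode or _plainhybridencode.

import os

def encode(path):                        # toy stand-in for the real encoder
    return path.replace('FOO', '_f_o_o')

class _basevfs(object):
    def __init__(self, base):
        self.base = base
    def join(self, path):
        return os.path.join(self.base, path) if path else self.base

class _fncvfs(object):                   # mimics _fncachevfs.join() only
    def __init__(self, vfs, encode):
        self.vfs, self.encode = vfs, encode
    def join(self, path):
        if path:
            return self.vfs.join(self.encode(path))
        return self.vfs.join(path)

v = _fncvfs(_basevfs('/srv/repo/.hg/store'), encode)
print(v.join('data/FOO.i'))   # /srv/repo/.hg/store/data/_f_o_o.i on POSIX
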
444 class fncachestore(basicstore):
450 class fncachestore(basicstore):
445 def __init__(self, path, vfstype, dotencode):
451 def __init__(self, path, vfstype, dotencode):
446 if dotencode:
452 if dotencode:
447 encode = _dothybridencode
453 encode = _dothybridencode
448 else:
454 else:
449 encode = _plainhybridencode
455 encode = _plainhybridencode
450 self.encode = encode
456 self.encode = encode
451 vfs = vfstype(path + '/store')
457 vfs = vfstype(path + '/store')
452 self.path = vfs.base
458 self.path = vfs.base
453 self.pathsep = self.path + '/'
459 self.pathsep = self.path + '/'
454 self.createmode = _calcmode(self.path)
460 self.createmode = _calcmode(self.path)
455 vfs.createmode = self.createmode
461 vfs.createmode = self.createmode
456 fnc = fncache(vfs)
462 fnc = fncache(vfs)
457 self.fncache = fnc
463 self.fncache = fnc
458 self.vfs = _fncachevfs(vfs, fnc, encode)
464 self.vfs = _fncachevfs(vfs, fnc, encode)
459 self.opener = self.vfs
465 self.opener = self.vfs
460
466
461 def join(self, f):
467 def join(self, f):
462 return self.pathsep + self.encode(f)
468 return self.pathsep + self.encode(f)
463
469
464 def getsize(self, path):
470 def getsize(self, path):
465 return os.stat(self.pathsep + path).st_size
471 return os.stat(self.pathsep + path).st_size
466
472
467 def datafiles(self):
473 def datafiles(self):
468 rewrite = False
474 rewrite = False
469 existing = []
475 existing = []
470 for f in sorted(self.fncache):
476 for f in sorted(self.fncache):
471 ef = self.encode(f)
477 ef = self.encode(f)
472 try:
478 try:
473 yield f, ef, self.getsize(ef)
479 yield f, ef, self.getsize(ef)
474 existing.append(f)
480 existing.append(f)
475 except OSError, err:
481 except OSError, err:
476 if err.errno != errno.ENOENT:
482 if err.errno != errno.ENOENT:
477 raise
483 raise
478 # nonexistent entry
484 # nonexistent entry
479 rewrite = True
485 rewrite = True
480 if rewrite:
486 if rewrite:
481 # rewrite fncache to remove nonexistent entries
487 # rewrite fncache to remove nonexistent entries
482 # (may be caused by rollback / strip)
488 # (may be caused by rollback / strip)
483 self.fncache.rewrite(existing)
489 self.fncache.rewrite(existing)
484
490
485 def copylist(self):
491 def copylist(self):
486 d = ('data dh fncache phaseroots obsstore'
492 d = ('data dh fncache phaseroots obsstore'
487 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
493 ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
488 return (['requires', '00changelog.i'] +
494 return (['requires', '00changelog.i'] +
489 ['store/' + f for f in d.split()])
495 ['store/' + f for f in d.split()])
490
496
491 def write(self):
497 def write(self):
492 self.fncache.write()
498 self.fncache.write()
493
499
494 def store(requirements, path, vfstype):
500 def store(requirements, path, vfstype):
495 if 'store' in requirements:
501 if 'store' in requirements:
496 if 'fncache' in requirements:
502 if 'fncache' in requirements:
497 return fncachestore(path, vfstype, 'dotencode' in requirements)
503 return fncachestore(path, vfstype, 'dotencode' in requirements)
498 return encodedstore(path, vfstype)
504 return encodedstore(path, vfstype)
499 return basicstore(path, vfstype)
505 return basicstore(path, vfstype)
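
Finally, the store() factory at the end of this file picks the store implementation from the repository requirements. A behavioural sketch of that selection: pick_store is an illustrative helper that returns labels instead of store instances, but the branching matches store() above.

def pick_store(requirements):
    # Same branching as store() above, returning a label instead of an object.
    if 'store' in requirements:
        if 'fncache' in requirements:
            return 'fncachestore(dotencode=%s)' % ('dotencode' in requirements)
        return 'encodedstore'
    return 'basicstore'

assert pick_store(set()) == 'basicstore'
assert pick_store({'store'}) == 'encodedstore'
assert pick_store({'store', 'fncache'}) == 'fncachestore(dotencode=False)'
assert pick_store({'store', 'fncache', 'dotencode'}) == 'fncachestore(dotencode=True)'
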