filecache: create an entry in _filecache when __set__ is called for a missing one...
Idan Kamara
r18316:f3637557 default
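This revision makes two related changes to the filecache property descriptor in scmutil.py: __get__ now asserts that any value cached in an object's __dict__ also has an entry in the object's _filecache map, and __set__ is reworked so that assigning to a property that was never read creates that missing entry (a stat-less filecacheentry) instead of leaving the two stores out of sync. A rough sketch of the behaviour after the change, modelled on the fakerepo helper from the test hunk at the bottom of this page (illustration only, not code from the commit):

    from mercurial import scmutil

    class fakerepo(object):
        def __init__(self):
            self._filecache = {}
        def join(self, p):
            return p
        @scmutil.filecache('x')
        def cached(self):
            print 'creating'

    repo = fakerepo()
    # Assign before the property has ever been read: __set__ now records a
    # stat-less filecacheentry for 'x', preserving the invariant that a name
    # present in __dict__ is also present in _filecache.
    repo.cached = 'set externally'
    assert 'cached' in repo._filecache      # was left missing before this change
    assert repo.cached == 'set externally'  # __get__'s new assertion also holds

The diff of scmutil.py follows; the hunk covering __get__ and __set__ is near the end of the file.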
@@ -1,982 +1,990 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding, phases
9 import util, error, osutil, revset, similar, encoding, phases
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, repo, excluded=None):
13 def nochangesfound(ui, repo, excluded=None):
14 '''Report no changes for push/pull, excluded is None or a list of
14 '''Report no changes for push/pull, excluded is None or a list of
15 nodes excluded from the push/pull.
15 nodes excluded from the push/pull.
16 '''
16 '''
17 secretlist = []
17 secretlist = []
18 if excluded:
18 if excluded:
19 for n in excluded:
19 for n in excluded:
20 ctx = repo[n]
20 ctx = repo[n]
21 if ctx.phase() >= phases.secret and not ctx.extinct():
21 if ctx.phase() >= phases.secret and not ctx.extinct():
22 secretlist.append(n)
22 secretlist.append(n)
23
23
24 if secretlist:
24 if secretlist:
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
25 ui.status(_("no changes found (ignored %d secret changesets)\n")
26 % len(secretlist))
26 % len(secretlist))
27 else:
27 else:
28 ui.status(_("no changes found\n"))
28 ui.status(_("no changes found\n"))
29
29
30 def checknewlabel(repo, lbl, kind):
30 def checknewlabel(repo, lbl, kind):
31 if lbl in ['tip', '.', 'null']:
31 if lbl in ['tip', '.', 'null']:
32 raise util.Abort(_("the name '%s' is reserved") % lbl)
32 raise util.Abort(_("the name '%s' is reserved") % lbl)
33 for c in (':', '\0', '\n', '\r'):
33 for c in (':', '\0', '\n', '\r'):
34 if c in lbl:
34 if c in lbl:
35 raise util.Abort(_("%r cannot be used in a name") % c)
35 raise util.Abort(_("%r cannot be used in a name") % c)
36
36
37 def checkfilename(f):
37 def checkfilename(f):
38 '''Check that the filename f is an acceptable filename for a tracked file'''
38 '''Check that the filename f is an acceptable filename for a tracked file'''
39 if '\r' in f or '\n' in f:
39 if '\r' in f or '\n' in f:
40 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
40 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
41
41
42 def checkportable(ui, f):
42 def checkportable(ui, f):
43 '''Check if filename f is portable and warn or abort depending on config'''
43 '''Check if filename f is portable and warn or abort depending on config'''
44 checkfilename(f)
44 checkfilename(f)
45 abort, warn = checkportabilityalert(ui)
45 abort, warn = checkportabilityalert(ui)
46 if abort or warn:
46 if abort or warn:
47 msg = util.checkwinfilename(f)
47 msg = util.checkwinfilename(f)
48 if msg:
48 if msg:
49 msg = "%s: %r" % (msg, f)
49 msg = "%s: %r" % (msg, f)
50 if abort:
50 if abort:
51 raise util.Abort(msg)
51 raise util.Abort(msg)
52 ui.warn(_("warning: %s\n") % msg)
52 ui.warn(_("warning: %s\n") % msg)
53
53
54 def checkportabilityalert(ui):
54 def checkportabilityalert(ui):
55 '''check if the user's config requests nothing, a warning, or abort for
55 '''check if the user's config requests nothing, a warning, or abort for
56 non-portable filenames'''
56 non-portable filenames'''
57 val = ui.config('ui', 'portablefilenames', 'warn')
57 val = ui.config('ui', 'portablefilenames', 'warn')
58 lval = val.lower()
58 lval = val.lower()
59 bval = util.parsebool(val)
59 bval = util.parsebool(val)
60 abort = os.name == 'nt' or lval == 'abort'
60 abort = os.name == 'nt' or lval == 'abort'
61 warn = bval or lval == 'warn'
61 warn = bval or lval == 'warn'
62 if bval is None and not (warn or abort or lval == 'ignore'):
62 if bval is None and not (warn or abort or lval == 'ignore'):
63 raise error.ConfigError(
63 raise error.ConfigError(
64 _("ui.portablefilenames value is invalid ('%s')") % val)
64 _("ui.portablefilenames value is invalid ('%s')") % val)
65 return abort, warn
65 return abort, warn
66
66
67 class casecollisionauditor(object):
67 class casecollisionauditor(object):
68 def __init__(self, ui, abort, dirstate):
68 def __init__(self, ui, abort, dirstate):
69 self._ui = ui
69 self._ui = ui
70 self._abort = abort
70 self._abort = abort
71 allfiles = '\0'.join(dirstate._map)
71 allfiles = '\0'.join(dirstate._map)
72 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
72 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
73 self._dirstate = dirstate
73 self._dirstate = dirstate
74 # The purpose of _newfiles is so that we don't complain about
74 # The purpose of _newfiles is so that we don't complain about
75 # case collisions if someone were to call this object with the
75 # case collisions if someone were to call this object with the
76 # same filename twice.
76 # same filename twice.
77 self._newfiles = set()
77 self._newfiles = set()
78
78
79 def __call__(self, f):
79 def __call__(self, f):
80 fl = encoding.lower(f)
80 fl = encoding.lower(f)
81 if (fl in self._loweredfiles and f not in self._dirstate and
81 if (fl in self._loweredfiles and f not in self._dirstate and
82 f not in self._newfiles):
82 f not in self._newfiles):
83 msg = _('possible case-folding collision for %s') % f
83 msg = _('possible case-folding collision for %s') % f
84 if self._abort:
84 if self._abort:
85 raise util.Abort(msg)
85 raise util.Abort(msg)
86 self._ui.warn(_("warning: %s\n") % msg)
86 self._ui.warn(_("warning: %s\n") % msg)
87 self._loweredfiles.add(fl)
87 self._loweredfiles.add(fl)
88 self._newfiles.add(f)
88 self._newfiles.add(f)
89
89
90 class pathauditor(object):
90 class pathauditor(object):
91 '''ensure that a filesystem path contains no banned components.
91 '''ensure that a filesystem path contains no banned components.
92 the following properties of a path are checked:
92 the following properties of a path are checked:
93
93
94 - ends with a directory separator
94 - ends with a directory separator
95 - under top-level .hg
95 - under top-level .hg
96 - starts at the root of a windows drive
96 - starts at the root of a windows drive
97 - contains ".."
97 - contains ".."
98 - traverses a symlink (e.g. a/symlink_here/b)
98 - traverses a symlink (e.g. a/symlink_here/b)
99 - inside a nested repository (a callback can be used to approve
99 - inside a nested repository (a callback can be used to approve
100 some nested repositories, e.g., subrepositories)
100 some nested repositories, e.g., subrepositories)
101 '''
101 '''
102
102
103 def __init__(self, root, callback=None):
103 def __init__(self, root, callback=None):
104 self.audited = set()
104 self.audited = set()
105 self.auditeddir = set()
105 self.auditeddir = set()
106 self.root = root
106 self.root = root
107 self.callback = callback
107 self.callback = callback
108 if os.path.lexists(root) and not util.checkcase(root):
108 if os.path.lexists(root) and not util.checkcase(root):
109 self.normcase = util.normcase
109 self.normcase = util.normcase
110 else:
110 else:
111 self.normcase = lambda x: x
111 self.normcase = lambda x: x
112
112
113 def __call__(self, path):
113 def __call__(self, path):
114 '''Check the relative path.
114 '''Check the relative path.
115 path may contain a pattern (e.g. foodir/**.txt)'''
115 path may contain a pattern (e.g. foodir/**.txt)'''
116
116
117 path = util.localpath(path)
117 path = util.localpath(path)
118 normpath = self.normcase(path)
118 normpath = self.normcase(path)
119 if normpath in self.audited:
119 if normpath in self.audited:
120 return
120 return
121 # AIX ignores "/" at end of path, others raise EISDIR.
121 # AIX ignores "/" at end of path, others raise EISDIR.
122 if util.endswithsep(path):
122 if util.endswithsep(path):
123 raise util.Abort(_("path ends in directory separator: %s") % path)
123 raise util.Abort(_("path ends in directory separator: %s") % path)
124 parts = util.splitpath(path)
124 parts = util.splitpath(path)
125 if (os.path.splitdrive(path)[0]
125 if (os.path.splitdrive(path)[0]
126 or parts[0].lower() in ('.hg', '.hg.', '')
126 or parts[0].lower() in ('.hg', '.hg.', '')
127 or os.pardir in parts):
127 or os.pardir in parts):
128 raise util.Abort(_("path contains illegal component: %s") % path)
128 raise util.Abort(_("path contains illegal component: %s") % path)
129 if '.hg' in path.lower():
129 if '.hg' in path.lower():
130 lparts = [p.lower() for p in parts]
130 lparts = [p.lower() for p in parts]
131 for p in '.hg', '.hg.':
131 for p in '.hg', '.hg.':
132 if p in lparts[1:]:
132 if p in lparts[1:]:
133 pos = lparts.index(p)
133 pos = lparts.index(p)
134 base = os.path.join(*parts[:pos])
134 base = os.path.join(*parts[:pos])
135 raise util.Abort(_("path '%s' is inside nested repo %r")
135 raise util.Abort(_("path '%s' is inside nested repo %r")
136 % (path, base))
136 % (path, base))
137
137
138 normparts = util.splitpath(normpath)
138 normparts = util.splitpath(normpath)
139 assert len(parts) == len(normparts)
139 assert len(parts) == len(normparts)
140
140
141 parts.pop()
141 parts.pop()
142 normparts.pop()
142 normparts.pop()
143 prefixes = []
143 prefixes = []
144 while parts:
144 while parts:
145 prefix = os.sep.join(parts)
145 prefix = os.sep.join(parts)
146 normprefix = os.sep.join(normparts)
146 normprefix = os.sep.join(normparts)
147 if normprefix in self.auditeddir:
147 if normprefix in self.auditeddir:
148 break
148 break
149 curpath = os.path.join(self.root, prefix)
149 curpath = os.path.join(self.root, prefix)
150 try:
150 try:
151 st = os.lstat(curpath)
151 st = os.lstat(curpath)
152 except OSError, err:
152 except OSError, err:
153 # EINVAL can be raised as invalid path syntax under win32.
153 # EINVAL can be raised as invalid path syntax under win32.
154 # They must be ignored for patterns can be checked too.
154 # They must be ignored for patterns can be checked too.
155 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
155 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
156 raise
156 raise
157 else:
157 else:
158 if stat.S_ISLNK(st.st_mode):
158 if stat.S_ISLNK(st.st_mode):
159 raise util.Abort(
159 raise util.Abort(
160 _('path %r traverses symbolic link %r')
160 _('path %r traverses symbolic link %r')
161 % (path, prefix))
161 % (path, prefix))
162 elif (stat.S_ISDIR(st.st_mode) and
162 elif (stat.S_ISDIR(st.st_mode) and
163 os.path.isdir(os.path.join(curpath, '.hg'))):
163 os.path.isdir(os.path.join(curpath, '.hg'))):
164 if not self.callback or not self.callback(curpath):
164 if not self.callback or not self.callback(curpath):
165 raise util.Abort(_("path '%s' is inside nested "
165 raise util.Abort(_("path '%s' is inside nested "
166 "repo %r")
166 "repo %r")
167 % (path, prefix))
167 % (path, prefix))
168 prefixes.append(normprefix)
168 prefixes.append(normprefix)
169 parts.pop()
169 parts.pop()
170 normparts.pop()
170 normparts.pop()
171
171
172 self.audited.add(normpath)
172 self.audited.add(normpath)
173 # only add prefixes to the cache after checking everything: we don't
173 # only add prefixes to the cache after checking everything: we don't
174 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
174 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
175 self.auditeddir.update(prefixes)
175 self.auditeddir.update(prefixes)
176
176
177 class abstractvfs(object):
177 class abstractvfs(object):
178 """Abstract base class; cannot be instantiated"""
178 """Abstract base class; cannot be instantiated"""
179
179
180 def __init__(self, *args, **kwargs):
180 def __init__(self, *args, **kwargs):
181 '''Prevent instantiation; don't call this from subclasses.'''
181 '''Prevent instantiation; don't call this from subclasses.'''
182 raise NotImplementedError('attempted instantiating ' + str(type(self)))
182 raise NotImplementedError('attempted instantiating ' + str(type(self)))
183
183
184 def tryread(self, path):
184 def tryread(self, path):
185 '''gracefully return an empty string for missing files'''
185 '''gracefully return an empty string for missing files'''
186 try:
186 try:
187 return self.read(path)
187 return self.read(path)
188 except IOError, inst:
188 except IOError, inst:
189 if inst.errno != errno.ENOENT:
189 if inst.errno != errno.ENOENT:
190 raise
190 raise
191 return ""
191 return ""
192
192
193 def read(self, path):
193 def read(self, path):
194 fp = self(path, 'rb')
194 fp = self(path, 'rb')
195 try:
195 try:
196 return fp.read()
196 return fp.read()
197 finally:
197 finally:
198 fp.close()
198 fp.close()
199
199
200 def write(self, path, data):
200 def write(self, path, data):
201 fp = self(path, 'wb')
201 fp = self(path, 'wb')
202 try:
202 try:
203 return fp.write(data)
203 return fp.write(data)
204 finally:
204 finally:
205 fp.close()
205 fp.close()
206
206
207 def append(self, path, data):
207 def append(self, path, data):
208 fp = self(path, 'ab')
208 fp = self(path, 'ab')
209 try:
209 try:
210 return fp.write(data)
210 return fp.write(data)
211 finally:
211 finally:
212 fp.close()
212 fp.close()
213
213
214 def exists(self, path=None):
214 def exists(self, path=None):
215 return os.path.exists(self.join(path))
215 return os.path.exists(self.join(path))
216
216
217 def isdir(self, path=None):
217 def isdir(self, path=None):
218 return os.path.isdir(self.join(path))
218 return os.path.isdir(self.join(path))
219
219
220 def makedir(self, path=None, notindexed=True):
220 def makedir(self, path=None, notindexed=True):
221 return util.makedir(self.join(path), notindexed)
221 return util.makedir(self.join(path), notindexed)
222
222
223 def makedirs(self, path=None, mode=None):
223 def makedirs(self, path=None, mode=None):
224 return util.makedirs(self.join(path), mode)
224 return util.makedirs(self.join(path), mode)
225
225
226 def mkdir(self, path=None):
226 def mkdir(self, path=None):
227 return os.mkdir(self.join(path))
227 return os.mkdir(self.join(path))
228
228
229 def readdir(self, path=None, stat=None, skip=None):
229 def readdir(self, path=None, stat=None, skip=None):
230 return osutil.listdir(self.join(path), stat, skip)
230 return osutil.listdir(self.join(path), stat, skip)
231
231
232 def stat(self, path=None):
232 def stat(self, path=None):
233 return os.stat(self.join(path))
233 return os.stat(self.join(path))
234
234
235 class vfs(abstractvfs):
235 class vfs(abstractvfs):
236 '''Operate files relative to a base directory
236 '''Operate files relative to a base directory
237
237
238 This class is used to hide the details of COW semantics and
238 This class is used to hide the details of COW semantics and
239 remote file access from higher level code.
239 remote file access from higher level code.
240 '''
240 '''
241 def __init__(self, base, audit=True, expand=False):
241 def __init__(self, base, audit=True, expand=False):
242 if expand:
242 if expand:
243 base = os.path.realpath(util.expandpath(base))
243 base = os.path.realpath(util.expandpath(base))
244 self.base = base
244 self.base = base
245 self._setmustaudit(audit)
245 self._setmustaudit(audit)
246 self.createmode = None
246 self.createmode = None
247 self._trustnlink = None
247 self._trustnlink = None
248
248
249 def _getmustaudit(self):
249 def _getmustaudit(self):
250 return self._audit
250 return self._audit
251
251
252 def _setmustaudit(self, onoff):
252 def _setmustaudit(self, onoff):
253 self._audit = onoff
253 self._audit = onoff
254 if onoff:
254 if onoff:
255 self.auditor = pathauditor(self.base)
255 self.auditor = pathauditor(self.base)
256 else:
256 else:
257 self.auditor = util.always
257 self.auditor = util.always
258
258
259 mustaudit = property(_getmustaudit, _setmustaudit)
259 mustaudit = property(_getmustaudit, _setmustaudit)
260
260
261 @util.propertycache
261 @util.propertycache
262 def _cansymlink(self):
262 def _cansymlink(self):
263 return util.checklink(self.base)
263 return util.checklink(self.base)
264
264
265 @util.propertycache
265 @util.propertycache
266 def _chmod(self):
266 def _chmod(self):
267 return util.checkexec(self.base)
267 return util.checkexec(self.base)
268
268
269 def _fixfilemode(self, name):
269 def _fixfilemode(self, name):
270 if self.createmode is None or not self._chmod:
270 if self.createmode is None or not self._chmod:
271 return
271 return
272 os.chmod(name, self.createmode & 0666)
272 os.chmod(name, self.createmode & 0666)
273
273
274 def __call__(self, path, mode="r", text=False, atomictemp=False):
274 def __call__(self, path, mode="r", text=False, atomictemp=False):
275 if self._audit:
275 if self._audit:
276 r = util.checkosfilename(path)
276 r = util.checkosfilename(path)
277 if r:
277 if r:
278 raise util.Abort("%s: %r" % (r, path))
278 raise util.Abort("%s: %r" % (r, path))
279 self.auditor(path)
279 self.auditor(path)
280 f = self.join(path)
280 f = self.join(path)
281
281
282 if not text and "b" not in mode:
282 if not text and "b" not in mode:
283 mode += "b" # for that other OS
283 mode += "b" # for that other OS
284
284
285 nlink = -1
285 nlink = -1
286 if mode not in ('r', 'rb'):
286 if mode not in ('r', 'rb'):
287 dirname, basename = util.split(f)
287 dirname, basename = util.split(f)
288 # If basename is empty, then the path is malformed because it points
288 # If basename is empty, then the path is malformed because it points
289 # to a directory. Let the posixfile() call below raise IOError.
289 # to a directory. Let the posixfile() call below raise IOError.
290 if basename:
290 if basename:
291 if atomictemp:
291 if atomictemp:
292 if not os.path.isdir(dirname):
292 if not os.path.isdir(dirname):
293 util.makedirs(dirname, self.createmode)
293 util.makedirs(dirname, self.createmode)
294 return util.atomictempfile(f, mode, self.createmode)
294 return util.atomictempfile(f, mode, self.createmode)
295 try:
295 try:
296 if 'w' in mode:
296 if 'w' in mode:
297 util.unlink(f)
297 util.unlink(f)
298 nlink = 0
298 nlink = 0
299 else:
299 else:
300 # nlinks() may behave differently for files on Windows
300 # nlinks() may behave differently for files on Windows
301 # shares if the file is open.
301 # shares if the file is open.
302 fd = util.posixfile(f)
302 fd = util.posixfile(f)
303 nlink = util.nlinks(f)
303 nlink = util.nlinks(f)
304 if nlink < 1:
304 if nlink < 1:
305 nlink = 2 # force mktempcopy (issue1922)
305 nlink = 2 # force mktempcopy (issue1922)
306 fd.close()
306 fd.close()
307 except (OSError, IOError), e:
307 except (OSError, IOError), e:
308 if e.errno != errno.ENOENT:
308 if e.errno != errno.ENOENT:
309 raise
309 raise
310 nlink = 0
310 nlink = 0
311 if not os.path.isdir(dirname):
311 if not os.path.isdir(dirname):
312 util.makedirs(dirname, self.createmode)
312 util.makedirs(dirname, self.createmode)
313 if nlink > 0:
313 if nlink > 0:
314 if self._trustnlink is None:
314 if self._trustnlink is None:
315 self._trustnlink = nlink > 1 or util.checknlink(f)
315 self._trustnlink = nlink > 1 or util.checknlink(f)
316 if nlink > 1 or not self._trustnlink:
316 if nlink > 1 or not self._trustnlink:
317 util.rename(util.mktempcopy(f), f)
317 util.rename(util.mktempcopy(f), f)
318 fp = util.posixfile(f, mode)
318 fp = util.posixfile(f, mode)
319 if nlink == 0:
319 if nlink == 0:
320 self._fixfilemode(f)
320 self._fixfilemode(f)
321 return fp
321 return fp
322
322
323 def symlink(self, src, dst):
323 def symlink(self, src, dst):
324 self.auditor(dst)
324 self.auditor(dst)
325 linkname = self.join(dst)
325 linkname = self.join(dst)
326 try:
326 try:
327 os.unlink(linkname)
327 os.unlink(linkname)
328 except OSError:
328 except OSError:
329 pass
329 pass
330
330
331 dirname = os.path.dirname(linkname)
331 dirname = os.path.dirname(linkname)
332 if not os.path.exists(dirname):
332 if not os.path.exists(dirname):
333 util.makedirs(dirname, self.createmode)
333 util.makedirs(dirname, self.createmode)
334
334
335 if self._cansymlink:
335 if self._cansymlink:
336 try:
336 try:
337 os.symlink(src, linkname)
337 os.symlink(src, linkname)
338 except OSError, err:
338 except OSError, err:
339 raise OSError(err.errno, _('could not symlink to %r: %s') %
339 raise OSError(err.errno, _('could not symlink to %r: %s') %
340 (src, err.strerror), linkname)
340 (src, err.strerror), linkname)
341 else:
341 else:
342 self.write(dst, src)
342 self.write(dst, src)
343
343
344 def audit(self, path):
344 def audit(self, path):
345 self.auditor(path)
345 self.auditor(path)
346
346
347 def join(self, path):
347 def join(self, path):
348 if path:
348 if path:
349 return os.path.join(self.base, path)
349 return os.path.join(self.base, path)
350 else:
350 else:
351 return self.base
351 return self.base
352
352
353 opener = vfs
353 opener = vfs
354
354
355 class auditvfs(object):
355 class auditvfs(object):
356 def __init__(self, vfs):
356 def __init__(self, vfs):
357 self.vfs = vfs
357 self.vfs = vfs
358
358
359 def _getmustaudit(self):
359 def _getmustaudit(self):
360 return self.vfs.mustaudit
360 return self.vfs.mustaudit
361
361
362 def _setmustaudit(self, onoff):
362 def _setmustaudit(self, onoff):
363 self.vfs.mustaudit = onoff
363 self.vfs.mustaudit = onoff
364
364
365 mustaudit = property(_getmustaudit, _setmustaudit)
365 mustaudit = property(_getmustaudit, _setmustaudit)
366
366
367 class filtervfs(abstractvfs, auditvfs):
367 class filtervfs(abstractvfs, auditvfs):
368 '''Wrapper vfs for filtering filenames with a function.'''
368 '''Wrapper vfs for filtering filenames with a function.'''
369
369
370 def __init__(self, vfs, filter):
370 def __init__(self, vfs, filter):
371 auditvfs.__init__(self, vfs)
371 auditvfs.__init__(self, vfs)
372 self._filter = filter
372 self._filter = filter
373
373
374 def __call__(self, path, *args, **kwargs):
374 def __call__(self, path, *args, **kwargs):
375 return self.vfs(self._filter(path), *args, **kwargs)
375 return self.vfs(self._filter(path), *args, **kwargs)
376
376
377 def join(self, path):
377 def join(self, path):
378 if path:
378 if path:
379 return self.vfs.join(self._filter(path))
379 return self.vfs.join(self._filter(path))
380 else:
380 else:
381 return self.vfs.join(path)
381 return self.vfs.join(path)
382
382
383 filteropener = filtervfs
383 filteropener = filtervfs
384
384
385 class readonlyvfs(abstractvfs, auditvfs):
385 class readonlyvfs(abstractvfs, auditvfs):
386 '''Wrapper vfs preventing any writing.'''
386 '''Wrapper vfs preventing any writing.'''
387
387
388 def __init__(self, vfs):
388 def __init__(self, vfs):
389 auditvfs.__init__(self, vfs)
389 auditvfs.__init__(self, vfs)
390
390
391 def __call__(self, path, mode='r', *args, **kw):
391 def __call__(self, path, mode='r', *args, **kw):
392 if mode not in ('r', 'rb'):
392 if mode not in ('r', 'rb'):
393 raise util.Abort('this vfs is read only')
393 raise util.Abort('this vfs is read only')
394 return self.vfs(path, mode, *args, **kw)
394 return self.vfs(path, mode, *args, **kw)
395
395
396
396
397 def canonpath(root, cwd, myname, auditor=None):
397 def canonpath(root, cwd, myname, auditor=None):
398 '''return the canonical path of myname, given cwd and root'''
398 '''return the canonical path of myname, given cwd and root'''
399 if util.endswithsep(root):
399 if util.endswithsep(root):
400 rootsep = root
400 rootsep = root
401 else:
401 else:
402 rootsep = root + os.sep
402 rootsep = root + os.sep
403 name = myname
403 name = myname
404 if not os.path.isabs(name):
404 if not os.path.isabs(name):
405 name = os.path.join(root, cwd, name)
405 name = os.path.join(root, cwd, name)
406 name = os.path.normpath(name)
406 name = os.path.normpath(name)
407 if auditor is None:
407 if auditor is None:
408 auditor = pathauditor(root)
408 auditor = pathauditor(root)
409 if name != rootsep and name.startswith(rootsep):
409 if name != rootsep and name.startswith(rootsep):
410 name = name[len(rootsep):]
410 name = name[len(rootsep):]
411 auditor(name)
411 auditor(name)
412 return util.pconvert(name)
412 return util.pconvert(name)
413 elif name == root:
413 elif name == root:
414 return ''
414 return ''
415 else:
415 else:
416 # Determine whether `name' is in the hierarchy at or beneath `root',
416 # Determine whether `name' is in the hierarchy at or beneath `root',
417 # by iterating name=dirname(name) until that causes no change (can't
417 # by iterating name=dirname(name) until that causes no change (can't
418 # check name == '/', because that doesn't work on windows). The list
418 # check name == '/', because that doesn't work on windows). The list
419 # `rel' holds the reversed list of components making up the relative
419 # `rel' holds the reversed list of components making up the relative
420 # file name we want.
420 # file name we want.
421 rel = []
421 rel = []
422 while True:
422 while True:
423 try:
423 try:
424 s = util.samefile(name, root)
424 s = util.samefile(name, root)
425 except OSError:
425 except OSError:
426 s = False
426 s = False
427 if s:
427 if s:
428 if not rel:
428 if not rel:
429 # name was actually the same as root (maybe a symlink)
429 # name was actually the same as root (maybe a symlink)
430 return ''
430 return ''
431 rel.reverse()
431 rel.reverse()
432 name = os.path.join(*rel)
432 name = os.path.join(*rel)
433 auditor(name)
433 auditor(name)
434 return util.pconvert(name)
434 return util.pconvert(name)
435 dirname, basename = util.split(name)
435 dirname, basename = util.split(name)
436 rel.append(basename)
436 rel.append(basename)
437 if dirname == name:
437 if dirname == name:
438 break
438 break
439 name = dirname
439 name = dirname
440
440
441 raise util.Abort('%s not under root' % myname)
441 raise util.Abort('%s not under root' % myname)
442
442
443 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
443 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
444 '''yield every hg repository under path, always recursively.
444 '''yield every hg repository under path, always recursively.
445 The recurse flag will only control recursion into repo working dirs'''
445 The recurse flag will only control recursion into repo working dirs'''
446 def errhandler(err):
446 def errhandler(err):
447 if err.filename == path:
447 if err.filename == path:
448 raise err
448 raise err
449 samestat = getattr(os.path, 'samestat', None)
449 samestat = getattr(os.path, 'samestat', None)
450 if followsym and samestat is not None:
450 if followsym and samestat is not None:
451 def adddir(dirlst, dirname):
451 def adddir(dirlst, dirname):
452 match = False
452 match = False
453 dirstat = os.stat(dirname)
453 dirstat = os.stat(dirname)
454 for lstdirstat in dirlst:
454 for lstdirstat in dirlst:
455 if samestat(dirstat, lstdirstat):
455 if samestat(dirstat, lstdirstat):
456 match = True
456 match = True
457 break
457 break
458 if not match:
458 if not match:
459 dirlst.append(dirstat)
459 dirlst.append(dirstat)
460 return not match
460 return not match
461 else:
461 else:
462 followsym = False
462 followsym = False
463
463
464 if (seen_dirs is None) and followsym:
464 if (seen_dirs is None) and followsym:
465 seen_dirs = []
465 seen_dirs = []
466 adddir(seen_dirs, path)
466 adddir(seen_dirs, path)
467 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
467 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
468 dirs.sort()
468 dirs.sort()
469 if '.hg' in dirs:
469 if '.hg' in dirs:
470 yield root # found a repository
470 yield root # found a repository
471 qroot = os.path.join(root, '.hg', 'patches')
471 qroot = os.path.join(root, '.hg', 'patches')
472 if os.path.isdir(os.path.join(qroot, '.hg')):
472 if os.path.isdir(os.path.join(qroot, '.hg')):
473 yield qroot # we have a patch queue repo here
473 yield qroot # we have a patch queue repo here
474 if recurse:
474 if recurse:
475 # avoid recursing inside the .hg directory
475 # avoid recursing inside the .hg directory
476 dirs.remove('.hg')
476 dirs.remove('.hg')
477 else:
477 else:
478 dirs[:] = [] # don't descend further
478 dirs[:] = [] # don't descend further
479 elif followsym:
479 elif followsym:
480 newdirs = []
480 newdirs = []
481 for d in dirs:
481 for d in dirs:
482 fname = os.path.join(root, d)
482 fname = os.path.join(root, d)
483 if adddir(seen_dirs, fname):
483 if adddir(seen_dirs, fname):
484 if os.path.islink(fname):
484 if os.path.islink(fname):
485 for hgname in walkrepos(fname, True, seen_dirs):
485 for hgname in walkrepos(fname, True, seen_dirs):
486 yield hgname
486 yield hgname
487 else:
487 else:
488 newdirs.append(d)
488 newdirs.append(d)
489 dirs[:] = newdirs
489 dirs[:] = newdirs
490
490
491 def osrcpath():
491 def osrcpath():
492 '''return default os-specific hgrc search path'''
492 '''return default os-specific hgrc search path'''
493 path = systemrcpath()
493 path = systemrcpath()
494 path.extend(userrcpath())
494 path.extend(userrcpath())
495 path = [os.path.normpath(f) for f in path]
495 path = [os.path.normpath(f) for f in path]
496 return path
496 return path
497
497
498 _rcpath = None
498 _rcpath = None
499
499
500 def rcpath():
500 def rcpath():
501 '''return hgrc search path. if env var HGRCPATH is set, use it.
501 '''return hgrc search path. if env var HGRCPATH is set, use it.
502 for each item in path, if directory, use files ending in .rc,
502 for each item in path, if directory, use files ending in .rc,
503 else use item.
503 else use item.
504 make HGRCPATH empty to only look in .hg/hgrc of current repo.
504 make HGRCPATH empty to only look in .hg/hgrc of current repo.
505 if no HGRCPATH, use default os-specific path.'''
505 if no HGRCPATH, use default os-specific path.'''
506 global _rcpath
506 global _rcpath
507 if _rcpath is None:
507 if _rcpath is None:
508 if 'HGRCPATH' in os.environ:
508 if 'HGRCPATH' in os.environ:
509 _rcpath = []
509 _rcpath = []
510 for p in os.environ['HGRCPATH'].split(os.pathsep):
510 for p in os.environ['HGRCPATH'].split(os.pathsep):
511 if not p:
511 if not p:
512 continue
512 continue
513 p = util.expandpath(p)
513 p = util.expandpath(p)
514 if os.path.isdir(p):
514 if os.path.isdir(p):
515 for f, kind in osutil.listdir(p):
515 for f, kind in osutil.listdir(p):
516 if f.endswith('.rc'):
516 if f.endswith('.rc'):
517 _rcpath.append(os.path.join(p, f))
517 _rcpath.append(os.path.join(p, f))
518 else:
518 else:
519 _rcpath.append(p)
519 _rcpath.append(p)
520 else:
520 else:
521 _rcpath = osrcpath()
521 _rcpath = osrcpath()
522 return _rcpath
522 return _rcpath
523
523
524 if os.name != 'nt':
524 if os.name != 'nt':
525
525
526 def rcfiles(path):
526 def rcfiles(path):
527 rcs = [os.path.join(path, 'hgrc')]
527 rcs = [os.path.join(path, 'hgrc')]
528 rcdir = os.path.join(path, 'hgrc.d')
528 rcdir = os.path.join(path, 'hgrc.d')
529 try:
529 try:
530 rcs.extend([os.path.join(rcdir, f)
530 rcs.extend([os.path.join(rcdir, f)
531 for f, kind in osutil.listdir(rcdir)
531 for f, kind in osutil.listdir(rcdir)
532 if f.endswith(".rc")])
532 if f.endswith(".rc")])
533 except OSError:
533 except OSError:
534 pass
534 pass
535 return rcs
535 return rcs
536
536
537 def systemrcpath():
537 def systemrcpath():
538 path = []
538 path = []
539 if sys.platform == 'plan9':
539 if sys.platform == 'plan9':
540 root = 'lib/mercurial'
540 root = 'lib/mercurial'
541 else:
541 else:
542 root = 'etc/mercurial'
542 root = 'etc/mercurial'
543 # old mod_python does not set sys.argv
543 # old mod_python does not set sys.argv
544 if len(getattr(sys, 'argv', [])) > 0:
544 if len(getattr(sys, 'argv', [])) > 0:
545 p = os.path.dirname(os.path.dirname(sys.argv[0]))
545 p = os.path.dirname(os.path.dirname(sys.argv[0]))
546 path.extend(rcfiles(os.path.join(p, root)))
546 path.extend(rcfiles(os.path.join(p, root)))
547 path.extend(rcfiles('/' + root))
547 path.extend(rcfiles('/' + root))
548 return path
548 return path
549
549
550 def userrcpath():
550 def userrcpath():
551 if sys.platform == 'plan9':
551 if sys.platform == 'plan9':
552 return [os.environ['home'] + '/lib/hgrc']
552 return [os.environ['home'] + '/lib/hgrc']
553 else:
553 else:
554 return [os.path.expanduser('~/.hgrc')]
554 return [os.path.expanduser('~/.hgrc')]
555
555
556 else:
556 else:
557
557
558 import _winreg
558 import _winreg
559
559
560 def systemrcpath():
560 def systemrcpath():
561 '''return default os-specific hgrc search path'''
561 '''return default os-specific hgrc search path'''
562 rcpath = []
562 rcpath = []
563 filename = util.executablepath()
563 filename = util.executablepath()
564 # Use mercurial.ini found in directory with hg.exe
564 # Use mercurial.ini found in directory with hg.exe
565 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
565 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
566 if os.path.isfile(progrc):
566 if os.path.isfile(progrc):
567 rcpath.append(progrc)
567 rcpath.append(progrc)
568 return rcpath
568 return rcpath
569 # Use hgrc.d found in directory with hg.exe
569 # Use hgrc.d found in directory with hg.exe
570 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
570 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
571 if os.path.isdir(progrcd):
571 if os.path.isdir(progrcd):
572 for f, kind in osutil.listdir(progrcd):
572 for f, kind in osutil.listdir(progrcd):
573 if f.endswith('.rc'):
573 if f.endswith('.rc'):
574 rcpath.append(os.path.join(progrcd, f))
574 rcpath.append(os.path.join(progrcd, f))
575 return rcpath
575 return rcpath
576 # else look for a system rcpath in the registry
576 # else look for a system rcpath in the registry
577 value = util.lookupreg('SOFTWARE\\Mercurial', None,
577 value = util.lookupreg('SOFTWARE\\Mercurial', None,
578 _winreg.HKEY_LOCAL_MACHINE)
578 _winreg.HKEY_LOCAL_MACHINE)
579 if not isinstance(value, str) or not value:
579 if not isinstance(value, str) or not value:
580 return rcpath
580 return rcpath
581 value = util.localpath(value)
581 value = util.localpath(value)
582 for p in value.split(os.pathsep):
582 for p in value.split(os.pathsep):
583 if p.lower().endswith('mercurial.ini'):
583 if p.lower().endswith('mercurial.ini'):
584 rcpath.append(p)
584 rcpath.append(p)
585 elif os.path.isdir(p):
585 elif os.path.isdir(p):
586 for f, kind in osutil.listdir(p):
586 for f, kind in osutil.listdir(p):
587 if f.endswith('.rc'):
587 if f.endswith('.rc'):
588 rcpath.append(os.path.join(p, f))
588 rcpath.append(os.path.join(p, f))
589 return rcpath
589 return rcpath
590
590
591 def userrcpath():
591 def userrcpath():
592 '''return os-specific hgrc search path to the user dir'''
592 '''return os-specific hgrc search path to the user dir'''
593 home = os.path.expanduser('~')
593 home = os.path.expanduser('~')
594 path = [os.path.join(home, 'mercurial.ini'),
594 path = [os.path.join(home, 'mercurial.ini'),
595 os.path.join(home, '.hgrc')]
595 os.path.join(home, '.hgrc')]
596 userprofile = os.environ.get('USERPROFILE')
596 userprofile = os.environ.get('USERPROFILE')
597 if userprofile:
597 if userprofile:
598 path.append(os.path.join(userprofile, 'mercurial.ini'))
598 path.append(os.path.join(userprofile, 'mercurial.ini'))
599 path.append(os.path.join(userprofile, '.hgrc'))
599 path.append(os.path.join(userprofile, '.hgrc'))
600 return path
600 return path
601
601
602 def revsingle(repo, revspec, default='.'):
602 def revsingle(repo, revspec, default='.'):
603 if not revspec:
603 if not revspec:
604 return repo[default]
604 return repo[default]
605
605
606 l = revrange(repo, [revspec])
606 l = revrange(repo, [revspec])
607 if len(l) < 1:
607 if len(l) < 1:
608 raise util.Abort(_('empty revision set'))
608 raise util.Abort(_('empty revision set'))
609 return repo[l[-1]]
609 return repo[l[-1]]
610
610
611 def revpair(repo, revs):
611 def revpair(repo, revs):
612 if not revs:
612 if not revs:
613 return repo.dirstate.p1(), None
613 return repo.dirstate.p1(), None
614
614
615 l = revrange(repo, revs)
615 l = revrange(repo, revs)
616
616
617 if len(l) == 0:
617 if len(l) == 0:
618 if revs:
618 if revs:
619 raise util.Abort(_('empty revision range'))
619 raise util.Abort(_('empty revision range'))
620 return repo.dirstate.p1(), None
620 return repo.dirstate.p1(), None
621
621
622 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
622 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
623 return repo.lookup(l[0]), None
623 return repo.lookup(l[0]), None
624
624
625 return repo.lookup(l[0]), repo.lookup(l[-1])
625 return repo.lookup(l[0]), repo.lookup(l[-1])
626
626
627 _revrangesep = ':'
627 _revrangesep = ':'
628
628
629 def revrange(repo, revs):
629 def revrange(repo, revs):
630 """Yield revision as strings from a list of revision specifications."""
630 """Yield revision as strings from a list of revision specifications."""
631
631
632 def revfix(repo, val, defval):
632 def revfix(repo, val, defval):
633 if not val and val != 0 and defval is not None:
633 if not val and val != 0 and defval is not None:
634 return defval
634 return defval
635 return repo[val].rev()
635 return repo[val].rev()
636
636
637 seen, l = set(), []
637 seen, l = set(), []
638 for spec in revs:
638 for spec in revs:
639 if l and not seen:
639 if l and not seen:
640 seen = set(l)
640 seen = set(l)
641 # attempt to parse old-style ranges first to deal with
641 # attempt to parse old-style ranges first to deal with
642 # things like old-tag which contain query metacharacters
642 # things like old-tag which contain query metacharacters
643 try:
643 try:
644 if isinstance(spec, int):
644 if isinstance(spec, int):
645 seen.add(spec)
645 seen.add(spec)
646 l.append(spec)
646 l.append(spec)
647 continue
647 continue
648
648
649 if _revrangesep in spec:
649 if _revrangesep in spec:
650 start, end = spec.split(_revrangesep, 1)
650 start, end = spec.split(_revrangesep, 1)
651 start = revfix(repo, start, 0)
651 start = revfix(repo, start, 0)
652 end = revfix(repo, end, len(repo) - 1)
652 end = revfix(repo, end, len(repo) - 1)
653 rangeiter = repo.changelog.revs(start, end)
653 rangeiter = repo.changelog.revs(start, end)
654 if not seen and not l:
654 if not seen and not l:
655 # by far the most common case: revs = ["-1:0"]
655 # by far the most common case: revs = ["-1:0"]
656 l = list(rangeiter)
656 l = list(rangeiter)
657 # defer syncing seen until next iteration
657 # defer syncing seen until next iteration
658 continue
658 continue
659 newrevs = set(rangeiter)
659 newrevs = set(rangeiter)
660 if seen:
660 if seen:
661 newrevs.difference_update(seen)
661 newrevs.difference_update(seen)
662 seen.update(newrevs)
662 seen.update(newrevs)
663 else:
663 else:
664 seen = newrevs
664 seen = newrevs
665 l.extend(sorted(newrevs, reverse=start > end))
665 l.extend(sorted(newrevs, reverse=start > end))
666 continue
666 continue
667 elif spec and spec in repo: # single unquoted rev
667 elif spec and spec in repo: # single unquoted rev
668 rev = revfix(repo, spec, None)
668 rev = revfix(repo, spec, None)
669 if rev in seen:
669 if rev in seen:
670 continue
670 continue
671 seen.add(rev)
671 seen.add(rev)
672 l.append(rev)
672 l.append(rev)
673 continue
673 continue
674 except error.RepoLookupError:
674 except error.RepoLookupError:
675 pass
675 pass
676
676
677 # fall through to new-style queries if old-style fails
677 # fall through to new-style queries if old-style fails
678 m = revset.match(repo.ui, spec)
678 m = revset.match(repo.ui, spec)
679 dl = [r for r in m(repo, list(repo)) if r not in seen]
679 dl = [r for r in m(repo, list(repo)) if r not in seen]
680 l.extend(dl)
680 l.extend(dl)
681 seen.update(dl)
681 seen.update(dl)
682
682
683 return l
683 return l
684
684
685 def expandpats(pats):
685 def expandpats(pats):
686 if not util.expandglobs:
686 if not util.expandglobs:
687 return list(pats)
687 return list(pats)
688 ret = []
688 ret = []
689 for p in pats:
689 for p in pats:
690 kind, name = matchmod._patsplit(p, None)
690 kind, name = matchmod._patsplit(p, None)
691 if kind is None:
691 if kind is None:
692 try:
692 try:
693 globbed = glob.glob(name)
693 globbed = glob.glob(name)
694 except re.error:
694 except re.error:
695 globbed = [name]
695 globbed = [name]
696 if globbed:
696 if globbed:
697 ret.extend(globbed)
697 ret.extend(globbed)
698 continue
698 continue
699 ret.append(p)
699 ret.append(p)
700 return ret
700 return ret
701
701
702 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
702 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
703 if pats == ("",):
703 if pats == ("",):
704 pats = []
704 pats = []
705 if not globbed and default == 'relpath':
705 if not globbed and default == 'relpath':
706 pats = expandpats(pats or [])
706 pats = expandpats(pats or [])
707
707
708 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
708 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
709 default)
709 default)
710 def badfn(f, msg):
710 def badfn(f, msg):
711 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
711 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
712 m.bad = badfn
712 m.bad = badfn
713 return m, pats
713 return m, pats
714
714
715 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
715 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
716 return matchandpats(ctx, pats, opts, globbed, default)[0]
716 return matchandpats(ctx, pats, opts, globbed, default)[0]
717
717
718 def matchall(repo):
718 def matchall(repo):
719 return matchmod.always(repo.root, repo.getcwd())
719 return matchmod.always(repo.root, repo.getcwd())
720
720
721 def matchfiles(repo, files):
721 def matchfiles(repo, files):
722 return matchmod.exact(repo.root, repo.getcwd(), files)
722 return matchmod.exact(repo.root, repo.getcwd(), files)
723
723
724 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
724 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
725 if dry_run is None:
725 if dry_run is None:
726 dry_run = opts.get('dry_run')
726 dry_run = opts.get('dry_run')
727 if similarity is None:
727 if similarity is None:
728 similarity = float(opts.get('similarity') or 0)
728 similarity = float(opts.get('similarity') or 0)
729 # we'd use status here, except handling of symlinks and ignore is tricky
729 # we'd use status here, except handling of symlinks and ignore is tricky
730 added, unknown, deleted, removed = [], [], [], []
730 added, unknown, deleted, removed = [], [], [], []
731 audit_path = pathauditor(repo.root)
731 audit_path = pathauditor(repo.root)
732 m = match(repo[None], pats, opts)
732 m = match(repo[None], pats, opts)
733 rejected = []
733 rejected = []
734 m.bad = lambda x, y: rejected.append(x)
734 m.bad = lambda x, y: rejected.append(x)
735
735
736 for abs in repo.walk(m):
736 for abs in repo.walk(m):
737 target = repo.wjoin(abs)
737 target = repo.wjoin(abs)
738 good = True
738 good = True
739 try:
739 try:
740 audit_path(abs)
740 audit_path(abs)
741 except (OSError, util.Abort):
741 except (OSError, util.Abort):
742 good = False
742 good = False
743 rel = m.rel(abs)
743 rel = m.rel(abs)
744 exact = m.exact(abs)
744 exact = m.exact(abs)
745 if good and abs not in repo.dirstate:
745 if good and abs not in repo.dirstate:
746 unknown.append(abs)
746 unknown.append(abs)
747 if repo.ui.verbose or not exact:
747 if repo.ui.verbose or not exact:
748 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
748 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
749 elif (repo.dirstate[abs] != 'r' and
749 elif (repo.dirstate[abs] != 'r' and
750 (not good or not os.path.lexists(target) or
750 (not good or not os.path.lexists(target) or
751 (os.path.isdir(target) and not os.path.islink(target)))):
751 (os.path.isdir(target) and not os.path.islink(target)))):
752 deleted.append(abs)
752 deleted.append(abs)
753 if repo.ui.verbose or not exact:
753 if repo.ui.verbose or not exact:
754 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
754 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
755 # for finding renames
755 # for finding renames
756 elif repo.dirstate[abs] == 'r':
756 elif repo.dirstate[abs] == 'r':
757 removed.append(abs)
757 removed.append(abs)
758 elif repo.dirstate[abs] == 'a':
758 elif repo.dirstate[abs] == 'a':
759 added.append(abs)
759 added.append(abs)
760 copies = {}
760 copies = {}
761 if similarity > 0:
761 if similarity > 0:
762 for old, new, score in similar.findrenames(repo,
762 for old, new, score in similar.findrenames(repo,
763 added + unknown, removed + deleted, similarity):
763 added + unknown, removed + deleted, similarity):
764 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
764 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
765 repo.ui.status(_('recording removal of %s as rename to %s '
765 repo.ui.status(_('recording removal of %s as rename to %s '
766 '(%d%% similar)\n') %
766 '(%d%% similar)\n') %
767 (m.rel(old), m.rel(new), score * 100))
767 (m.rel(old), m.rel(new), score * 100))
768 copies[new] = old
768 copies[new] = old
769
769
770 if not dry_run:
770 if not dry_run:
771 wctx = repo[None]
771 wctx = repo[None]
772 wlock = repo.wlock()
772 wlock = repo.wlock()
773 try:
773 try:
774 wctx.forget(deleted)
774 wctx.forget(deleted)
775 wctx.add(unknown)
775 wctx.add(unknown)
776 for new, old in copies.iteritems():
776 for new, old in copies.iteritems():
777 wctx.copy(old, new)
777 wctx.copy(old, new)
778 finally:
778 finally:
779 wlock.release()
779 wlock.release()
780
780
781 for f in rejected:
781 for f in rejected:
782 if f in m.files():
782 if f in m.files():
783 return 1
783 return 1
784 return 0
784 return 0
785
785
786 def updatedir(ui, repo, patches, similarity=0):
786 def updatedir(ui, repo, patches, similarity=0):
787 '''Update dirstate after patch application according to metadata'''
787 '''Update dirstate after patch application according to metadata'''
788 if not patches:
788 if not patches:
789 return []
789 return []
790 copies = []
790 copies = []
791 removes = set()
791 removes = set()
792 cfiles = patches.keys()
792 cfiles = patches.keys()
793 cwd = repo.getcwd()
793 cwd = repo.getcwd()
794 if cwd:
794 if cwd:
795 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
795 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
796 for f in patches:
796 for f in patches:
797 gp = patches[f]
797 gp = patches[f]
798 if not gp:
798 if not gp:
799 continue
799 continue
800 if gp.op == 'RENAME':
800 if gp.op == 'RENAME':
801 copies.append((gp.oldpath, gp.path))
801 copies.append((gp.oldpath, gp.path))
802 removes.add(gp.oldpath)
802 removes.add(gp.oldpath)
803 elif gp.op == 'COPY':
803 elif gp.op == 'COPY':
804 copies.append((gp.oldpath, gp.path))
804 copies.append((gp.oldpath, gp.path))
805 elif gp.op == 'DELETE':
805 elif gp.op == 'DELETE':
806 removes.add(gp.path)
806 removes.add(gp.path)
807
807
808 wctx = repo[None]
808 wctx = repo[None]
809 for src, dst in copies:
809 for src, dst in copies:
810 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
810 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
811 if (not similarity) and removes:
811 if (not similarity) and removes:
812 wctx.remove(sorted(removes), True)
812 wctx.remove(sorted(removes), True)
813
813
814 for f in patches:
814 for f in patches:
815 gp = patches[f]
815 gp = patches[f]
816 if gp and gp.mode:
816 if gp and gp.mode:
817 islink, isexec = gp.mode
817 islink, isexec = gp.mode
818 dst = repo.wjoin(gp.path)
818 dst = repo.wjoin(gp.path)
819 # patch won't create empty files
819 # patch won't create empty files
820 if gp.op == 'ADD' and not os.path.lexists(dst):
820 if gp.op == 'ADD' and not os.path.lexists(dst):
821 flags = (isexec and 'x' or '') + (islink and 'l' or '')
821 flags = (isexec and 'x' or '') + (islink and 'l' or '')
822 repo.wwrite(gp.path, '', flags)
822 repo.wwrite(gp.path, '', flags)
823 util.setflags(dst, islink, isexec)
823 util.setflags(dst, islink, isexec)
824 addremove(repo, cfiles, similarity=similarity)
824 addremove(repo, cfiles, similarity=similarity)
825 files = patches.keys()
825 files = patches.keys()
826 files.extend([r for r in removes if r not in files])
826 files.extend([r for r in removes if r not in files])
827 return sorted(files)
827 return sorted(files)
828
828
829 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
829 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
830 """Update the dirstate to reflect the intent of copying src to dst. For
830 """Update the dirstate to reflect the intent of copying src to dst. For
831 different reasons it might not end with dst being marked as copied from src.
831 different reasons it might not end with dst being marked as copied from src.
832 """
832 """
833 origsrc = repo.dirstate.copied(src) or src
833 origsrc = repo.dirstate.copied(src) or src
834 if dst == origsrc: # copying back a copy?
834 if dst == origsrc: # copying back a copy?
835 if repo.dirstate[dst] not in 'mn' and not dryrun:
835 if repo.dirstate[dst] not in 'mn' and not dryrun:
836 repo.dirstate.normallookup(dst)
836 repo.dirstate.normallookup(dst)
837 else:
837 else:
838 if repo.dirstate[origsrc] == 'a' and origsrc == src:
838 if repo.dirstate[origsrc] == 'a' and origsrc == src:
839 if not ui.quiet:
839 if not ui.quiet:
840 ui.warn(_("%s has not been committed yet, so no copy "
840 ui.warn(_("%s has not been committed yet, so no copy "
841 "data will be stored for %s.\n")
841 "data will be stored for %s.\n")
842 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
842 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
843 if repo.dirstate[dst] in '?r' and not dryrun:
843 if repo.dirstate[dst] in '?r' and not dryrun:
844 wctx.add([dst])
844 wctx.add([dst])
845 elif not dryrun:
845 elif not dryrun:
846 wctx.copy(origsrc, dst)
846 wctx.copy(origsrc, dst)
847
847
848 def readrequires(opener, supported):
848 def readrequires(opener, supported):
849 '''Reads and parses .hg/requires and checks if all entries found
849 '''Reads and parses .hg/requires and checks if all entries found
850 are in the list of supported features.'''
850 are in the list of supported features.'''
851 requirements = set(opener.read("requires").splitlines())
851 requirements = set(opener.read("requires").splitlines())
852 missings = []
852 missings = []
853 for r in requirements:
853 for r in requirements:
854 if r not in supported:
854 if r not in supported:
855 if not r or not r[0].isalnum():
855 if not r or not r[0].isalnum():
856 raise error.RequirementError(_(".hg/requires file is corrupt"))
856 raise error.RequirementError(_(".hg/requires file is corrupt"))
857 missings.append(r)
857 missings.append(r)
858 missings.sort()
858 missings.sort()
859 if missings:
859 if missings:
860 raise error.RequirementError(
860 raise error.RequirementError(
861 _("unknown repository format: requires features '%s' (upgrade "
861 _("unknown repository format: requires features '%s' (upgrade "
862 "Mercurial)") % "', '".join(missings))
862 "Mercurial)") % "', '".join(missings))
863 return requirements
863 return requirements
864
864
865 class filecacheentry(object):
865 class filecacheentry(object):
866 def __init__(self, path, stat=True):
866 def __init__(self, path, stat=True):
867 self.path = path
867 self.path = path
868 self.cachestat = None
868 self.cachestat = None
869 self._cacheable = None
869 self._cacheable = None
870
870
871 if stat:
871 if stat:
872 self.cachestat = filecacheentry.stat(self.path)
872 self.cachestat = filecacheentry.stat(self.path)
873
873
874 if self.cachestat:
874 if self.cachestat:
875 self._cacheable = self.cachestat.cacheable()
875 self._cacheable = self.cachestat.cacheable()
876 else:
876 else:
877 # None means we don't know yet
877 # None means we don't know yet
878 self._cacheable = None
878 self._cacheable = None
879
879
880 def refresh(self):
880 def refresh(self):
881 if self.cacheable():
881 if self.cacheable():
882 self.cachestat = filecacheentry.stat(self.path)
882 self.cachestat = filecacheentry.stat(self.path)
883
883
884 def cacheable(self):
884 def cacheable(self):
885 if self._cacheable is not None:
885 if self._cacheable is not None:
886 return self._cacheable
886 return self._cacheable
887
887
888 # we don't know yet, assume it is for now
888 # we don't know yet, assume it is for now
889 return True
889 return True
890
890
891 def changed(self):
891 def changed(self):
892 # no point in going further if we can't cache it
892 # no point in going further if we can't cache it
893 if not self.cacheable():
893 if not self.cacheable():
894 return True
894 return True
895
895
896 newstat = filecacheentry.stat(self.path)
896 newstat = filecacheentry.stat(self.path)
897
897
898 # we may not know if it's cacheable yet, check again now
898 # we may not know if it's cacheable yet, check again now
899 if newstat and self._cacheable is None:
899 if newstat and self._cacheable is None:
900 self._cacheable = newstat.cacheable()
900 self._cacheable = newstat.cacheable()
901
901
902 # check again
902 # check again
903 if not self._cacheable:
903 if not self._cacheable:
904 return True
904 return True
905
905
906 if self.cachestat != newstat:
906 if self.cachestat != newstat:
907 self.cachestat = newstat
907 self.cachestat = newstat
908 return True
908 return True
909 else:
909 else:
910 return False
910 return False
911
911
912 @staticmethod
912 @staticmethod
913 def stat(path):
913 def stat(path):
914 try:
914 try:
915 return util.cachestat(path)
915 return util.cachestat(path)
916 except OSError, e:
916 except OSError, e:
917 if e.errno != errno.ENOENT:
917 if e.errno != errno.ENOENT:
918 raise
918 raise
919
919
920 class filecache(object):
920 class filecache(object):
921 '''A property like decorator that tracks a file under .hg/ for updates.
921 '''A property like decorator that tracks a file under .hg/ for updates.
922
922
923 Records stat info when called in _filecache.
923 Records stat info when called in _filecache.
924
924
925 On subsequent calls, compares old stat info with new info, and recreates
925 On subsequent calls, compares old stat info with new info, and recreates
926 the object when needed, updating the new stat info in _filecache.
926 the object when needed, updating the new stat info in _filecache.
927
927
928 Mercurial either atomic renames or appends for files under .hg,
928 Mercurial either atomic renames or appends for files under .hg,
929 so to ensure the cache is reliable we need the filesystem to be able
929 so to ensure the cache is reliable we need the filesystem to be able
930 to tell us if a file has been replaced. If it can't, we fallback to
930 to tell us if a file has been replaced. If it can't, we fallback to
931 recreating the object on every call (essentially the same behaviour as
931 recreating the object on every call (essentially the same behaviour as
932 propertycache).'''
932 propertycache).'''
933 def __init__(self, path):
933 def __init__(self, path):
934 self.path = path
934 self.path = path
935
935
936 def join(self, obj, fname):
936 def join(self, obj, fname):
937 """Used to compute the runtime path of the cached file.
937 """Used to compute the runtime path of the cached file.
938
938
939 Users should subclass filecache and provide their own version of this
939 Users should subclass filecache and provide their own version of this
940 function to call the appropriate join function on 'obj' (an instance
940 function to call the appropriate join function on 'obj' (an instance
941 of the class that its member function was decorated).
941 of the class that its member function was decorated).
942 """
942 """
943 return obj.join(fname)
943 return obj.join(fname)
944
944
945 def __call__(self, func):
945 def __call__(self, func):
946 self.func = func
946 self.func = func
947 self.name = func.__name__
947 self.name = func.__name__
948 return self
948 return self
949
949
950 def __get__(self, obj, type=None):
950 def __get__(self, obj, type=None):
951 # do we need to check if the file changed?
951 # do we need to check if the file changed?
952 if self.name in obj.__dict__:
952 if self.name in obj.__dict__:
953 assert self.name in obj._filecache, self.name
953 return obj.__dict__[self.name]
954 return obj.__dict__[self.name]
954
955
955 entry = obj._filecache.get(self.name)
956 entry = obj._filecache.get(self.name)
956
957
957 if entry:
958 if entry:
958 if entry.changed():
959 if entry.changed():
959 entry.obj = self.func(obj)
960 entry.obj = self.func(obj)
960 else:
961 else:
961 path = self.join(obj, self.path)
962 path = self.join(obj, self.path)
962
963
963 # We stat -before- creating the object so our cache doesn't lie if
964 # We stat -before- creating the object so our cache doesn't lie if
964 # a writer modified the file between the time we read and stat
965 # a writer modified the file between the time we read and stat
965 entry = filecacheentry(path)
966 entry = filecacheentry(path)
966 entry.obj = self.func(obj)
967 entry.obj = self.func(obj)
967
968
968 obj._filecache[self.name] = entry
969 obj._filecache[self.name] = entry
969
970
970 obj.__dict__[self.name] = entry.obj
971 obj.__dict__[self.name] = entry.obj
971 return entry.obj
972 return entry.obj
972
973
973 def __set__(self, obj, value):
974 def __set__(self, obj, value):
974 if self.name in obj._filecache:
975 if self.name not in obj._filecache:
975 obj._filecache[self.name].obj = value # update cached copy
976 # we add an entry for the missing value because X in __dict__
977 # implies X in _filecache
978 ce = filecacheentry(self.join(obj, self.path), False)
979 obj._filecache[self.name] = ce
980 else:
981 ce = obj._filecache[self.name]
982
983 ce.obj = value # update cached copy
976 obj.__dict__[self.name] = value # update copy returned by obj.x
984 obj.__dict__[self.name] = value # update copy returned by obj.x
977
985
978 def __delete__(self, obj):
986 def __delete__(self, obj):
979 try:
987 try:
980 del obj.__dict__[self.name]
988 del obj.__dict__[self.name]
981 except KeyError:
989 except KeyError:
982 raise AttributeError(self.name)
990 raise AttributeError(self.name)
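The rewritten __set__ above preserves the invariant that the new assert in __get__ depends on: any name present in obj.__dict__ must also have an entry in obj._filecache, even when the property is assigned before it has ever been read. A minimal sketch of that invariant, modelled on the fakerepo used in the test below (Python 2, assuming a Mercurial checkout on sys.path; the scratch file name 'x' is only illustrative and does not need to exist):

    from mercurial import scmutil

    class fakerepo(object):
        def __init__(self):
            self._filecache = {}

        def join(self, p):
            return p

        @scmutil.filecache('x')
        def cached(self):
            print 'creating'

    repo = fakerepo()
    repo.cached = 0                      # __set__ runs before any __get__
    # a filecacheentry was created for the still-missing file, so the
    # "in __dict__ implies in _filecache" invariant holds
    assert 'cached' in repo._filecache
    assert repo._filecache['cached'].obj == 0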
@@ -1,111 +1,126 b''
1 import sys, os, subprocess
1 import sys, os, subprocess
2
2
3 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
3 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
4 'cacheable']):
4 'cacheable']):
5 sys.exit(80)
5 sys.exit(80)
6
6
7 from mercurial import util, scmutil, extensions, hg, ui
7 from mercurial import util, scmutil, extensions, hg, ui
8
8
9 filecache = scmutil.filecache
9 filecache = scmutil.filecache
10
10
11 class fakerepo(object):
11 class fakerepo(object):
12 def __init__(self):
12 def __init__(self):
13 self._filecache = {}
13 self._filecache = {}
14
14
15 def join(self, p):
15 def join(self, p):
16 return p
16 return p
17
17
18 def sjoin(self, p):
18 def sjoin(self, p):
19 return p
19 return p
20
20
21 @filecache('x')
21 @filecache('x')
22 def cached(self):
22 def cached(self):
23 print 'creating'
23 print 'creating'
24
24
25 def invalidate(self):
25 def invalidate(self):
26 for k in self._filecache:
26 for k in self._filecache:
27 try:
27 try:
28 delattr(self, k)
28 delattr(self, k)
29 except AttributeError:
29 except AttributeError:
30 pass
30 pass
31
31
32 def basic(repo):
32 def basic(repo):
33 # file doesn't exist, calls function
33 # file doesn't exist, calls function
34 repo.cached
34 repo.cached
35
35
36 repo.invalidate()
36 repo.invalidate()
37 # file still doesn't exist, uses cache
37 # file still doesn't exist, uses cache
38 repo.cached
38 repo.cached
39
39
40 # create empty file
40 # create empty file
41 f = open('x', 'w')
41 f = open('x', 'w')
42 f.close()
42 f.close()
43 repo.invalidate()
43 repo.invalidate()
44 # should recreate the object
44 # should recreate the object
45 repo.cached
45 repo.cached
46
46
47 f = open('x', 'w')
47 f = open('x', 'w')
48 f.write('a')
48 f.write('a')
49 f.close()
49 f.close()
50 repo.invalidate()
50 repo.invalidate()
51 # should recreate the object
51 # should recreate the object
52 repo.cached
52 repo.cached
53
53
54 repo.invalidate()
54 repo.invalidate()
55 # stats file again, nothing changed, reuses object
55 # stats file again, nothing changed, reuses object
56 repo.cached
56 repo.cached
57
57
58 # atomic replace file, size doesn't change
58 # atomic replace file, size doesn't change
59 # hopefully st_mtime doesn't change either, so the only thing invalidating
59 # hopefully st_mtime doesn't change either, so the only thing invalidating
60 # the cache here is the inode change
60 # the cache here is the inode change
61 f = scmutil.opener('.')('x', 'w', atomictemp=True)
61 f = scmutil.opener('.')('x', 'w', atomictemp=True)
62 f.write('b')
62 f.write('b')
63 f.close()
63 f.close()
64
64
65 repo.invalidate()
65 repo.invalidate()
66 repo.cached
66 repo.cached
67
67
68 def fakeuncacheable():
68 def fakeuncacheable():
69 def wrapcacheable(orig, *args, **kwargs):
69 def wrapcacheable(orig, *args, **kwargs):
70 return False
70 return False
71
71
72 def wrapinit(orig, *args, **kwargs):
72 def wrapinit(orig, *args, **kwargs):
73 pass
73 pass
74
74
75 originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
75 originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit)
76 origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
76 origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable',
77 wrapcacheable)
77 wrapcacheable)
78
78
79 try:
79 try:
80 os.remove('x')
80 os.remove('x')
81 except OSError:
81 except OSError:
82 pass
82 pass
83
83
84 basic(fakerepo())
84 basic(fakerepo())
85
85
86 util.cachestat.cacheable = origcacheable
86 util.cachestat.cacheable = origcacheable
87 util.cachestat.__init__ = originit
87 util.cachestat.__init__ = originit
88
88
89 def test_filecache_synced():
89 def test_filecache_synced():
90 # test old behaviour that caused filecached properties to go out of sync
90 # test old behaviour that caused filecached properties to go out of sync
91 os.system('hg init && echo a >> a && hg ci -qAm.')
91 os.system('hg init && echo a >> a && hg ci -qAm.')
92 repo = hg.repository(ui.ui())
92 repo = hg.repository(ui.ui())
93 # first rollback clears the filecache, but changelog still stays in __dict__
93 # first rollback clears the filecache, but changelog still stays in __dict__
94 repo.rollback()
94 repo.rollback()
95 repo.commit('.')
95 repo.commit('.')
96 # second rollback comes along and touches the changelog externally
96 # second rollback comes along and touches the changelog externally
97 # (file is moved)
97 # (file is moved)
98 repo.rollback()
98 repo.rollback()
99 # but since changelog isn't under filecache control anymore, we don't
99 # but since changelog isn't under filecache control anymore, we don't
100 # see that it changed, and return the old changelog without reconstructing
100 # see that it changed, and return the old changelog without reconstructing
101 # it
101 # it
102 repo.commit('.')
102 repo.commit('.')
103
103
104 def setbeforeget(repo):
105 os.remove('x')
106 repo.cached = 0
107 repo.invalidate()
108 print repo.cached
109 repo.invalidate()
110 f = open('x', 'w')
111 f.write('a')
112 f.close()
113 print repo.cached
114
104 print 'basic:'
115 print 'basic:'
105 print
116 print
106 basic(fakerepo())
117 basic(fakerepo())
107 print
118 print
108 print 'fakeuncacheable:'
119 print 'fakeuncacheable:'
109 print
120 print
110 fakeuncacheable()
121 fakeuncacheable()
111 test_filecache_synced()
122 test_filecache_synced()
123 print
124 print 'setbeforeget:'
125 print
126 setbeforeget(fakerepo())
@@ -1,19 +1,25 b''
1 basic:
1 basic:
2
2
3 creating
3 creating
4 creating
4 creating
5 creating
5 creating
6 creating
6 creating
7
7
8 fakeuncacheable:
8 fakeuncacheable:
9
9
10 creating
10 creating
11 creating
11 creating
12 creating
12 creating
13 creating
13 creating
14 creating
14 creating
15 creating
15 creating
16 repository tip rolled back to revision -1 (undo commit)
16 repository tip rolled back to revision -1 (undo commit)
17 working directory now based on revision -1
17 working directory now based on revision -1
18 repository tip rolled back to revision -1 (undo commit)
18 repository tip rolled back to revision -1 (undo commit)
19 working directory now based on revision -1
19 working directory now based on revision -1
20
21 setbeforeget:
22
23 0
24 creating
25 None
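For reference, the setbeforeget output above (0, creating, None) can be reproduced step by step. A sketch under the same assumptions as before (Python 2, Mercurial importable, scratch file 'x' in the current directory); the comments tie each access to the corresponding output line:

    import os
    from mercurial import scmutil

    class fakerepo(object):
        def __init__(self):
            self._filecache = {}

        def join(self, p):
            return p

        @scmutil.filecache('x')
        def cached(self):
            print 'creating'            # no return value, so the property is None

        def invalidate(self):
            for k in self._filecache:
                try:
                    delattr(self, k)
                except AttributeError:
                    pass

    if os.path.exists('x'):
        os.remove('x')

    repo = fakerepo()
    repo.cached = 0        # __set__ stores 0 and records an entry for the missing file
    repo.invalidate()
    print repo.cached      # "0": 'x' is still absent, the entry is unchanged, value reused
    repo.invalidate()
    f = open('x', 'w')
    f.write('a')
    f.close()
    print repo.cached      # "creating" then "None": 'x' appeared, so the decorated
                           # function runs again and its result (None) is returned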