##// END OF EJS Templates
scmutil: introduce a filecacheentry that can watch multiple paths
Siddharth Agarwal -
r20044:d38de18d default
parent child Browse files
Show More
@@ -1,886 +1,903 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
def nochangesfound(ui, repo, excluded=None):
    '''Tell the user that push/pull found nothing to transfer.

    excluded is None or a list of nodes deliberately left out of the
    push/pull; secret, non-extinct ones among them are counted and
    mentioned in the message.
    '''
    secret = []
    for node in (excluded or []):
        # discovery should not have included the filtered revision,
        # we have to explicitly exclude it until discovery is cleanup.
        if node not in repo:
            continue
        c = repo[node]
        if c.phase() >= phases.secret and not c.extinct():
            secret.append(node)

    if secret:
        msg = _("no changes found (ignored %d secret changesets)\n")
        ui.status(msg % len(secret))
    else:
        ui.status(_("no changes found\n"))
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl cannot be used as a new label name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    '''
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise util.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a label that parses as an integer would shadow revision numbers
        raise util.Abort(_("cannot use an integer as a name"))
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    problem = util.checkwinfilename(f)
    if not problem:
        return
    problem = "%s: %r" % (problem, f)
    if abort:
        raise util.Abort(problem)
    ui.warn(_("warning: %s\n") % problem)
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans derived from the
    ui.portablefilenames setting; any unrecognized value raises
    ConfigError.  On Windows, non-portable names always abort.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    abort = lval == 'abort' or os.name == 'nt'
    warn = bval or lval == 'warn'
    if not (warn or abort or lval == 'ignore') and bval is None:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
class casecollisionauditor(object):
    """Detect filenames that collide case-insensitively with tracked files.

    Instances are callable; call one with each filename about to be
    added.  Depending on the 'abort' flag a collision either raises
    util.Abort or emits a warning on ui.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case the entire dirstate in a single encoding.lower()
        # call (files joined on NUL) instead of once per filename.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # Check filename f for a case-folding collision and remember it.
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # already-tracked files (f in dirstate) are not collisions with
        # themselves
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
class abstractvfs(object):
    """Abstract base class; cannot be instantiated

    Subclasses must provide __call__ (open a file) and join (resolve a
    path); the convenience methods below are built on those two.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # only ENOENT is tolerated; other I/O errors propagate
            if inst.errno != errno.ENOENT:
                raise
            return ""

    def open(self, path, mode="r", text=False, atomictemp=False):
        # The first call rebinds 'open' on the instance to __call__, so
        # later opens skip this extra indirection.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp)

    def read(self, path):
        # Return the whole (binary) content of path, closing it promptly.
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def write(self, path, data):
        # Replace the content of path with data (binary mode).
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def append(self, path, data):
        # Append data at the end of path (binary mode).
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    # The remaining methods are thin wrappers forwarding to os/util
    # equivalents with the path resolved through self.join().

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # base: directory all relative paths are resolved against
        # audit: if True, vet every path through a pathauditor
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        # mode bits for newly created files; None means umask default
        self.createmode = None
        # tri-state: None = not yet probed; True/False = whether nlink
        # counts from the filesystem can be trusted (see __call__)
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept any path unconditionally
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # cached once per instance: symlink support under base
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # cached once per instance: exec-bit support under base
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply createmode to a freshly created file, when configured
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        '''Open path (relative to base) and return a file object.

        atomictemp writes go to a temporary file that is renamed into
        place on close.  Unless text is True, 'b' is forced into mode.
        Writes break hardlinks first (copy-on-write) when nlink counts
        indicate the file may be shared.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                    util.ensuredirs(dirname, self.createmode)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # possibly hardlinked: replace with a private copy
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # brand new file: apply the configured creation mode
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        '''Create a symlink at dst pointing at src; on filesystems
        without symlink support, fall back to writing src as content.'''
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path):
        # a falsy path resolves to the base directory itself
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base

# legacy alias kept for callers that still use the old name
opener = vfs
class auditvfs(object):
    '''Base for wrapper vfs classes: holds the wrapped vfs and proxies
    its "mustaudit" switch.'''

    def __init__(self, vfs):
        self.vfs = vfs

    @property
    def mustaudit(self):
        '''Whether the wrapped vfs audits the paths it is given.'''
        return self.vfs.mustaudit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self.vfs.mustaudit = onoff
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # rewrite the filename, then delegate to the wrapped vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        # a falsy path bypasses the filter (it names the base directory)
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self._filter(path))

filteropener = filtervfs
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain reads are allowed through
        if mode in ('r', 'rb'):
            return self.vfs(path, mode, *args, **kw)
        raise util.Abort('this vfs is read only')
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # an error on the starting point itself is fatal; others ignored
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Return True (and remember the stat) when dirname has not
            # been visited yet; used to break symlink cycles.
            dirstat = os.stat(dirname)
            for prior in dirlst:
                if samestat(dirstat, prior):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # no reliable way to detect cycles, so don't follow symlinks
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
def osrcpath():
    '''return default os-specific hgrc search path'''
    # system-wide configuration first, then the user's own
    path = systemrcpath()
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]

# lazily computed cache for rcpath(); None until first use
_rcpath = None
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    # computed once, then cached in the module-level _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                # empty entries are skipped; an entirely empty HGRCPATH
                # therefore yields an empty search path
                if not p:
                    continue
                p = util.expandpath(p)
                if os.path.isdir(p):
                    # a directory contributes all of its *.rc files
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = osrcpath()
    return _rcpath
def revsingle(repo, revspec, default='.'):
    '''Return the changectx for the one revision named by revspec.

    A false revspec (other than 0) selects the default revision; a spec
    resolving to several revisions yields the last of them.
    '''
    if not revspec and revspec != 0:
        return repo[default]
    revs = revrange(repo, [revspec])
    if not revs:
        raise util.Abort(_('empty revision set'))
    return repo[revs[-1]]
def revpair(repo, revs):
    '''Resolve a list of revision specs to a (node, node-or-None) pair.'''
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    if len(resolved) == 0:
        # revs is known non-empty here (early return above), so this
        # branch always aborts; shape kept as-is for safety
        if revs:
            raise util.Abort(_('empty revision range'))
        return repo.dirstate.p1(), None

    lone = (len(resolved) == 1 and len(revs) == 1
            and _revrangesep not in revs[0])
    if lone:
        # a single, unranged spec names exactly one revision
        return repo.lookup(resolved[0]), None

    return repo.lookup(resolved[0]), repo.lookup(resolved[-1])

# separator used by old-style "start:end" range specs
_revrangesep = ':'
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # an empty half of a range spec (e.g. ":tip") falls back to defval
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    # 'l' is the ordered result; 'seen' mirrors it as a set for O(1)
    # duplicate checks, but is synced lazily (see fast path below)
    seen, l = set(), []
    for spec in revs:
        if l and not seen:
            # catch up: a previous iteration deferred building 'seen'
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l.append(spec)
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start <= 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = list(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # preserve the range's direction in the output order
                l.extend(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l.append(rev)
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec)
        dl = [r for r in m(repo, list(repo)) if r not in seen]
        l.extend(dl)
        seen.update(dl)

    return l
def expandpats(pats):
    '''Expand bare (kindless) patterns using shell glob rules.

    Patterns carrying an explicit kind prefix pass through untouched,
    as does everything when glob expansion is disabled for the platform.
    '''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for pat in pats:
        kind, name = matchmod._patsplit(pat, None)
        if kind is not None:
            expanded.append(pat)
            continue
        try:
            matches = glob.glob(name)
        except re.error:
            matches = [name]
        # a glob that matches nothing keeps the original pattern
        expanded.extend(matches if matches else [pat])
    return expanded
def matchandpats(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    '''Return a (matcher, expanded pats) pair for the given context.

    pats and opts default to empty; None sentinels replace the previous
    mutable [] / {} defaults so no call can accidentally share (or
    mutate) state across invocations, and passing None explicitly now
    behaves like omitting the argument.  A bad-file callback that
    reports to ui.warn is installed on the matcher before returning.
    '''
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default)
    def badfn(f, msg):
        # report unmatched/bad files rather than aborting the operation
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    return m, pats
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Like matchandpats(), but return only the matcher.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return m
def matchall(repo):
    '''Return a matcher that selects every file in the repository.'''
    cwd = repo.getcwd()
    return matchmod.always(repo.root, cwd)
def matchfiles(repo, files):
    '''Return a matcher that selects exactly the given list of files.'''
    cwd = repo.getcwd()
    return matchmod.exact(repo.root, cwd, files)
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    detecting renames by content similarity.

    Returns 1 if any explicitly given pattern matched nothing, else 0.
    dry_run and similarity fall back to the corresponding opts entries
    when not given.
    '''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    rejected = []
    # collect unmatched files instead of warning; checked at the end
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    unknownset = set(unknown)
    # report what is about to be added (unknown) or dropped (deleted)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            # prefer the relative name when patterns were given
            rel = m.rel(abs)
            if abs in unknownset:
                status = _('adding %s\n') % ((pats and rel) or abs)
            else:
                status = _('removing %s\n') % ((pats and rel) or abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown, deleted, renames)

    # an explicit pattern that matched nothing is an error for the caller
    for f in rejected:
        if f in m.files():
            return 1
    return 0
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    matcher = matchfiles(repo, files)
    badfiles = []
    matcher.bad = lambda f, msg: badfiles.append(f)

    added, unknown, deleted, removed = _interestingfiles(repo, matcher)

    if repo.ui.verbose:
        # report each file that will become tracked or dropped
        unknownset = set(unknown)
        for path in sorted(unknownset.union(deleted)):
            if path in unknownset:
                repo.ui.status(_('adding %s\n') % path)
            else:
                repo.ui.status(_('removing %s\n') % path)

    renames = _findrenames(repo, matcher, added + unknown,
                           removed + deleted, similarity)
    _markchanges(repo, unknown, deleted, renames)

    # signal failure when a file the caller named was rejected
    for path in badfiles:
        if path in matcher.files():
            return 1
    return 0
624
624
def _interestingfiles(repo, matcher):
    '''Walk the dirstate with matcher, collecting the files addremove cares
    about.

    Unlike dirstate.status, this does not distinguish modified from clean
    files.'''
    added, unknown, deleted, removed = [], [], [], []
    auditor = pathutil.pathauditor(repo.root)

    dirstate = repo.dirstate
    subrepos = sorted(repo[None].substate)
    walkresults = dirstate.walk(matcher, subrepos, True, False, full=False)
    for path, st in walkresults.iteritems():
        state = dirstate[path]
        if state == '?' and auditor.check(path):
            unknown.append(path)
        elif state != 'r' and not st:
            deleted.append(path)
        elif state == 'r':
            # removed files are kept for rename detection
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed
651
651
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping each new (added) name to the old (removed)
    name it appears renamed from.  Empty unless similarity > 0.'''
    if similarity <= 0:
        return {}
    renames = {}
    candidates = similar.findrenames(repo, added, removed, similarity)
    for src, dst, score in candidates:
        if (repo.ui.verbose or not matcher.exact(src)
            or not matcher.exact(dst)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(src), matcher.rel(dst),
                            score * 100))
        renames[dst] = src
    return renames
666
666
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the working-directory lock across the whole batch of updates
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps new name -> old name (see _findrenames)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
679
679
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record back to its original source, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is added/removed: reset it to a plain tracked file
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # source was only added, never committed: just add dst too,
            # no copy metadata is recorded
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
698
698
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the set of requirements read; raises RequirementError when
    the file is corrupt or names an unsupported feature.'''
    requirements = set(opener.read("requires").splitlines())
    unsupported = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # a blank or non-alphanumeric-leading entry means a damaged file
        if not requirement or not requirement[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        unsupported.append(requirement)
    if unsupported:
        unsupported.sort()
        raise error.RequirementError(
            _("unknown repository format: requires features '%s' (upgrade "
              "Mercurial)") % "', '".join(unsupported))
    return requirements
715
715
class filecachesubentry(object):
    '''Tracks the stat state of a single path on behalf of filecache.

    cachestat holds the last recorded util.cachestat for path, or None when
    the path could not be statted (or stat was never requested).
    _cacheable is a tri-state: True/False once known, None for "not yet
    determined" (treated optimistically as cacheable).'''
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record the current stat so later changed() calls compare to now
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the path appears changed since the last recorded stat.

        Conservatively True as well when the filesystem cannot reliably
        tell us whether the file was replaced (not cacheable).'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None (implicitly) when the path does not exist;
        # any other OS error is propagated
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
770
770
class filecacheentry(object):
    '''A filecache entry that watches several paths at once, delegating to
    one filecachesubentry per path.'''
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        for subentry in self._entries:
            if subentry.changed():
                return True
        return False

    def refresh(self):
        '''re-stat every watched path'''
        for subentry in self._entries:
            subentry.refresh()
787
class filecache(object):
    '''A property like decorator that tracks a file under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when needed, updating the new stat info in _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).'''
    def __init__(self, path):
        # relative path under .hg/; join() resolves it at access time
        self.path = path

    def join(self, obj, fname):
        """Used to compute the runtime path of the cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # file was replaced or modified: recompute the value
                entry.obj = self.func(obj)
        else:
            path = self.join(obj, self.path)

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecachesubentry(path, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            ce = filecachesubentry(self.join(obj, self.path), False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the materialized value; the _filecache entry stays so
        # a later __get__ can decide whether to recompute
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
842
859
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest

    Pure-Python implementation; shadowed below by the C version in
    parsers.dirs when that is available.'''

    def __init__(self, map, skip=None):
        # map is either a dirstate-like mapping (has iteritems; values are
        # state tuples whose first element is the state character) or a
        # plain iterable of paths.  With a mapping and a skip character,
        # entries in that state are left out (e.g. skip='r' for removed).
        self._dirs = {}
        addpath = self.addpath
        if util.safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        # walk ancestors deepest-first (finddirs order): unseen ancestors
        # get a count of 1; the first already-known ancestor absorbs one
        # extra reference and the walk stops, since its own parents were
        # accounted for when it was first added
        dirs = self._dirs
        for base in finddirs(path):
            if base in dirs:
                dirs[base] += 1
                return
            dirs[base] = 1

    def delpath(self, path):
        # mirror image of addpath: decrement deepest-first, deleting each
        # ancestor whose count drops to zero, and stop at the first one
        # that remains referenced
        dirs = self._dirs
        for base in finddirs(path):
            if dirs[base] > 1:
                dirs[base] -= 1
                return
            del dirs[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
878
895
# prefer the C implementation of dirs when the parsers extension module
# provides one; it replaces the pure-Python class defined above
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
881
898
def finddirs(path):
    '''Yield every ancestor directory of path, deepest first.

    "a/b/c" yields "a/b" then "a"; a path without a slash yields nothing.'''
    cut = path.rfind('/')
    while cut != -1:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
General Comments 0
You need to be logged in to leave comments. Login now