vfs: add "chmod()"
FUJIWARA Katsunori
r20086:f3df2612 (default branch)
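The changeset below adds a thin chmod() wrapper to abstractvfs: it joins the relative path against the vfs base and delegates to os.chmod(). A minimal usage sketch, not part of the changeset; the base directory, file name, and mode are invented for illustration, assuming Mercurial's Python 2 packages are importable:

    from mercurial import scmutil

    myvfs = scmutil.vfs('/tmp/example')             # vfs rooted at a base directory
    myvfs.write('hook.sh', '#!/bin/sh\necho ok\n')  # paths are relative to the base
    myvfs.chmod('hook.sh', 0755)                    # same as os.chmod('/tmp/example/hook.sh', 0755)

Because chmod() lives on abstractvfs, the same call is available through filtervfs and any other subclass that provides join().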
@@ -1,910 +1,913 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 def nochangesfound(ui, repo, excluded=None):
23 def nochangesfound(ui, repo, excluded=None):
24 '''Report no changes for push/pull, excluded is None or a list of
24 '''Report no changes for push/pull, excluded is None or a list of
25 nodes excluded from the push/pull.
25 nodes excluded from the push/pull.
26 '''
26 '''
27 secretlist = []
27 secretlist = []
28 if excluded:
28 if excluded:
29 for n in excluded:
29 for n in excluded:
30 if n not in repo:
30 if n not in repo:
31 # discovery should not have included the filtered revision,
31 # discovery should not have included the filtered revision,
32 # we have to explicitly exclude it until discovery is cleanup.
32 # we have to explicitly exclude it until discovery is cleanup.
33 continue
33 continue
34 ctx = repo[n]
34 ctx = repo[n]
35 if ctx.phase() >= phases.secret and not ctx.extinct():
35 if ctx.phase() >= phases.secret and not ctx.extinct():
36 secretlist.append(n)
36 secretlist.append(n)
37
37
38 if secretlist:
38 if secretlist:
39 ui.status(_("no changes found (ignored %d secret changesets)\n")
39 ui.status(_("no changes found (ignored %d secret changesets)\n")
40 % len(secretlist))
40 % len(secretlist))
41 else:
41 else:
42 ui.status(_("no changes found\n"))
42 ui.status(_("no changes found\n"))
43
43
44 def checknewlabel(repo, lbl, kind):
44 def checknewlabel(repo, lbl, kind):
45 # Do not use the "kind" parameter in ui output.
45 # Do not use the "kind" parameter in ui output.
46 # It makes strings difficult to translate.
46 # It makes strings difficult to translate.
47 if lbl in ['tip', '.', 'null']:
47 if lbl in ['tip', '.', 'null']:
48 raise util.Abort(_("the name '%s' is reserved") % lbl)
48 raise util.Abort(_("the name '%s' is reserved") % lbl)
49 for c in (':', '\0', '\n', '\r'):
49 for c in (':', '\0', '\n', '\r'):
50 if c in lbl:
50 if c in lbl:
51 raise util.Abort(_("%r cannot be used in a name") % c)
51 raise util.Abort(_("%r cannot be used in a name") % c)
52 try:
52 try:
53 int(lbl)
53 int(lbl)
54 raise util.Abort(_("cannot use an integer as a name"))
54 raise util.Abort(_("cannot use an integer as a name"))
55 except ValueError:
55 except ValueError:
56 pass
56 pass
57
57
58 def checkfilename(f):
58 def checkfilename(f):
59 '''Check that the filename f is an acceptable filename for a tracked file'''
59 '''Check that the filename f is an acceptable filename for a tracked file'''
60 if '\r' in f or '\n' in f:
60 if '\r' in f or '\n' in f:
61 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
61 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
62
62
63 def checkportable(ui, f):
63 def checkportable(ui, f):
64 '''Check if filename f is portable and warn or abort depending on config'''
64 '''Check if filename f is portable and warn or abort depending on config'''
65 checkfilename(f)
65 checkfilename(f)
66 abort, warn = checkportabilityalert(ui)
66 abort, warn = checkportabilityalert(ui)
67 if abort or warn:
67 if abort or warn:
68 msg = util.checkwinfilename(f)
68 msg = util.checkwinfilename(f)
69 if msg:
69 if msg:
70 msg = "%s: %r" % (msg, f)
70 msg = "%s: %r" % (msg, f)
71 if abort:
71 if abort:
72 raise util.Abort(msg)
72 raise util.Abort(msg)
73 ui.warn(_("warning: %s\n") % msg)
73 ui.warn(_("warning: %s\n") % msg)
74
74
75 def checkportabilityalert(ui):
75 def checkportabilityalert(ui):
76 '''check if the user's config requests nothing, a warning, or abort for
76 '''check if the user's config requests nothing, a warning, or abort for
77 non-portable filenames'''
77 non-portable filenames'''
78 val = ui.config('ui', 'portablefilenames', 'warn')
78 val = ui.config('ui', 'portablefilenames', 'warn')
79 lval = val.lower()
79 lval = val.lower()
80 bval = util.parsebool(val)
80 bval = util.parsebool(val)
81 abort = os.name == 'nt' or lval == 'abort'
81 abort = os.name == 'nt' or lval == 'abort'
82 warn = bval or lval == 'warn'
82 warn = bval or lval == 'warn'
83 if bval is None and not (warn or abort or lval == 'ignore'):
83 if bval is None and not (warn or abort or lval == 'ignore'):
84 raise error.ConfigError(
84 raise error.ConfigError(
85 _("ui.portablefilenames value is invalid ('%s')") % val)
85 _("ui.portablefilenames value is invalid ('%s')") % val)
86 return abort, warn
86 return abort, warn
87
87
88 class casecollisionauditor(object):
88 class casecollisionauditor(object):
89 def __init__(self, ui, abort, dirstate):
89 def __init__(self, ui, abort, dirstate):
90 self._ui = ui
90 self._ui = ui
91 self._abort = abort
91 self._abort = abort
92 allfiles = '\0'.join(dirstate._map)
92 allfiles = '\0'.join(dirstate._map)
93 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
93 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
94 self._dirstate = dirstate
94 self._dirstate = dirstate
95 # The purpose of _newfiles is so that we don't complain about
95 # The purpose of _newfiles is so that we don't complain about
96 # case collisions if someone were to call this object with the
96 # case collisions if someone were to call this object with the
97 # same filename twice.
97 # same filename twice.
98 self._newfiles = set()
98 self._newfiles = set()
99
99
100 def __call__(self, f):
100 def __call__(self, f):
101 if f in self._newfiles:
101 if f in self._newfiles:
102 return
102 return
103 fl = encoding.lower(f)
103 fl = encoding.lower(f)
104 if fl in self._loweredfiles and f not in self._dirstate:
104 if fl in self._loweredfiles and f not in self._dirstate:
105 msg = _('possible case-folding collision for %s') % f
105 msg = _('possible case-folding collision for %s') % f
106 if self._abort:
106 if self._abort:
107 raise util.Abort(msg)
107 raise util.Abort(msg)
108 self._ui.warn(_("warning: %s\n") % msg)
108 self._ui.warn(_("warning: %s\n") % msg)
109 self._loweredfiles.add(fl)
109 self._loweredfiles.add(fl)
110 self._newfiles.add(f)
110 self._newfiles.add(f)
111
111
112 class abstractvfs(object):
112 class abstractvfs(object):
113 """Abstract base class; cannot be instantiated"""
113 """Abstract base class; cannot be instantiated"""
114
114
115 def __init__(self, *args, **kwargs):
115 def __init__(self, *args, **kwargs):
116 '''Prevent instantiation; don't call this from subclasses.'''
116 '''Prevent instantiation; don't call this from subclasses.'''
117 raise NotImplementedError('attempted instantiating ' + str(type(self)))
117 raise NotImplementedError('attempted instantiating ' + str(type(self)))
118
118
119 def tryread(self, path):
119 def tryread(self, path):
120 '''gracefully return an empty string for missing files'''
120 '''gracefully return an empty string for missing files'''
121 try:
121 try:
122 return self.read(path)
122 return self.read(path)
123 except IOError, inst:
123 except IOError, inst:
124 if inst.errno != errno.ENOENT:
124 if inst.errno != errno.ENOENT:
125 raise
125 raise
126 return ""
126 return ""
127
127
128 def open(self, path, mode="r", text=False, atomictemp=False):
128 def open(self, path, mode="r", text=False, atomictemp=False):
129 self.open = self.__call__
129 self.open = self.__call__
130 return self.__call__(path, mode, text, atomictemp)
130 return self.__call__(path, mode, text, atomictemp)
131
131
132 def read(self, path):
132 def read(self, path):
133 fp = self(path, 'rb')
133 fp = self(path, 'rb')
134 try:
134 try:
135 return fp.read()
135 return fp.read()
136 finally:
136 finally:
137 fp.close()
137 fp.close()
138
138
139 def write(self, path, data):
139 def write(self, path, data):
140 fp = self(path, 'wb')
140 fp = self(path, 'wb')
141 try:
141 try:
142 return fp.write(data)
142 return fp.write(data)
143 finally:
143 finally:
144 fp.close()
144 fp.close()
145
145
146 def append(self, path, data):
146 def append(self, path, data):
147 fp = self(path, 'ab')
147 fp = self(path, 'ab')
148 try:
148 try:
149 return fp.write(data)
149 return fp.write(data)
150 finally:
150 finally:
151 fp.close()
151 fp.close()
152
152
153 def chmod(self, path, mode):
154 return os.chmod(self.join(path), mode)
155
153 def exists(self, path=None):
156 def exists(self, path=None):
154 return os.path.exists(self.join(path))
157 return os.path.exists(self.join(path))
155
158
156 def fstat(self, fp):
159 def fstat(self, fp):
157 return util.fstat(fp)
160 return util.fstat(fp)
158
161
159 def isdir(self, path=None):
162 def isdir(self, path=None):
160 return os.path.isdir(self.join(path))
163 return os.path.isdir(self.join(path))
161
164
162 def isfile(self, path=None):
165 def isfile(self, path=None):
163 return os.path.isfile(self.join(path))
166 return os.path.isfile(self.join(path))
164
167
165 def islink(self, path=None):
168 def islink(self, path=None):
166 return os.path.islink(self.join(path))
169 return os.path.islink(self.join(path))
167
170
168 def lstat(self, path=None):
171 def lstat(self, path=None):
169 return os.lstat(self.join(path))
172 return os.lstat(self.join(path))
170
173
171 def makedir(self, path=None, notindexed=True):
174 def makedir(self, path=None, notindexed=True):
172 return util.makedir(self.join(path), notindexed)
175 return util.makedir(self.join(path), notindexed)
173
176
174 def makedirs(self, path=None, mode=None):
177 def makedirs(self, path=None, mode=None):
175 return util.makedirs(self.join(path), mode)
178 return util.makedirs(self.join(path), mode)
176
179
177 def mkdir(self, path=None):
180 def mkdir(self, path=None):
178 return os.mkdir(self.join(path))
181 return os.mkdir(self.join(path))
179
182
180 def readdir(self, path=None, stat=None, skip=None):
183 def readdir(self, path=None, stat=None, skip=None):
181 return osutil.listdir(self.join(path), stat, skip)
184 return osutil.listdir(self.join(path), stat, skip)
182
185
183 def rename(self, src, dst):
186 def rename(self, src, dst):
184 return util.rename(self.join(src), self.join(dst))
187 return util.rename(self.join(src), self.join(dst))
185
188
186 def readlink(self, path):
189 def readlink(self, path):
187 return os.readlink(self.join(path))
190 return os.readlink(self.join(path))
188
191
189 def setflags(self, path, l, x):
192 def setflags(self, path, l, x):
190 return util.setflags(self.join(path), l, x)
193 return util.setflags(self.join(path), l, x)
191
194
192 def stat(self, path=None):
195 def stat(self, path=None):
193 return os.stat(self.join(path))
196 return os.stat(self.join(path))
194
197
195 def unlink(self, path=None):
198 def unlink(self, path=None):
196 return util.unlink(self.join(path))
199 return util.unlink(self.join(path))
197
200
198 def utime(self, path=None, t=None):
201 def utime(self, path=None, t=None):
199 return os.utime(self.join(path), t)
202 return os.utime(self.join(path), t)
200
203
201 class vfs(abstractvfs):
204 class vfs(abstractvfs):
202 '''Operate files relative to a base directory
205 '''Operate files relative to a base directory
203
206
204 This class is used to hide the details of COW semantics and
207 This class is used to hide the details of COW semantics and
205 remote file access from higher level code.
208 remote file access from higher level code.
206 '''
209 '''
207 def __init__(self, base, audit=True, expandpath=False, realpath=False):
210 def __init__(self, base, audit=True, expandpath=False, realpath=False):
208 if expandpath:
211 if expandpath:
209 base = util.expandpath(base)
212 base = util.expandpath(base)
210 if realpath:
213 if realpath:
211 base = os.path.realpath(base)
214 base = os.path.realpath(base)
212 self.base = base
215 self.base = base
213 self._setmustaudit(audit)
216 self._setmustaudit(audit)
214 self.createmode = None
217 self.createmode = None
215 self._trustnlink = None
218 self._trustnlink = None
216
219
217 def _getmustaudit(self):
220 def _getmustaudit(self):
218 return self._audit
221 return self._audit
219
222
220 def _setmustaudit(self, onoff):
223 def _setmustaudit(self, onoff):
221 self._audit = onoff
224 self._audit = onoff
222 if onoff:
225 if onoff:
223 self.audit = pathutil.pathauditor(self.base)
226 self.audit = pathutil.pathauditor(self.base)
224 else:
227 else:
225 self.audit = util.always
228 self.audit = util.always
226
229
227 mustaudit = property(_getmustaudit, _setmustaudit)
230 mustaudit = property(_getmustaudit, _setmustaudit)
228
231
229 @util.propertycache
232 @util.propertycache
230 def _cansymlink(self):
233 def _cansymlink(self):
231 return util.checklink(self.base)
234 return util.checklink(self.base)
232
235
233 @util.propertycache
236 @util.propertycache
234 def _chmod(self):
237 def _chmod(self):
235 return util.checkexec(self.base)
238 return util.checkexec(self.base)
236
239
237 def _fixfilemode(self, name):
240 def _fixfilemode(self, name):
238 if self.createmode is None or not self._chmod:
241 if self.createmode is None or not self._chmod:
239 return
242 return
240 os.chmod(name, self.createmode & 0666)
243 os.chmod(name, self.createmode & 0666)
241
244
242 def __call__(self, path, mode="r", text=False, atomictemp=False):
245 def __call__(self, path, mode="r", text=False, atomictemp=False):
243 if self._audit:
246 if self._audit:
244 r = util.checkosfilename(path)
247 r = util.checkosfilename(path)
245 if r:
248 if r:
246 raise util.Abort("%s: %r" % (r, path))
249 raise util.Abort("%s: %r" % (r, path))
247 self.audit(path)
250 self.audit(path)
248 f = self.join(path)
251 f = self.join(path)
249
252
250 if not text and "b" not in mode:
253 if not text and "b" not in mode:
251 mode += "b" # for that other OS
254 mode += "b" # for that other OS
252
255
253 nlink = -1
256 nlink = -1
254 if mode not in ('r', 'rb'):
257 if mode not in ('r', 'rb'):
255 dirname, basename = util.split(f)
258 dirname, basename = util.split(f)
256 # If basename is empty, then the path is malformed because it points
259 # If basename is empty, then the path is malformed because it points
257 # to a directory. Let the posixfile() call below raise IOError.
260 # to a directory. Let the posixfile() call below raise IOError.
258 if basename:
261 if basename:
259 if atomictemp:
262 if atomictemp:
260 util.ensuredirs(dirname, self.createmode)
263 util.ensuredirs(dirname, self.createmode)
261 return util.atomictempfile(f, mode, self.createmode)
264 return util.atomictempfile(f, mode, self.createmode)
262 try:
265 try:
263 if 'w' in mode:
266 if 'w' in mode:
264 util.unlink(f)
267 util.unlink(f)
265 nlink = 0
268 nlink = 0
266 else:
269 else:
267 # nlinks() may behave differently for files on Windows
270 # nlinks() may behave differently for files on Windows
268 # shares if the file is open.
271 # shares if the file is open.
269 fd = util.posixfile(f)
272 fd = util.posixfile(f)
270 nlink = util.nlinks(f)
273 nlink = util.nlinks(f)
271 if nlink < 1:
274 if nlink < 1:
272 nlink = 2 # force mktempcopy (issue1922)
275 nlink = 2 # force mktempcopy (issue1922)
273 fd.close()
276 fd.close()
274 except (OSError, IOError), e:
277 except (OSError, IOError), e:
275 if e.errno != errno.ENOENT:
278 if e.errno != errno.ENOENT:
276 raise
279 raise
277 nlink = 0
280 nlink = 0
278 util.ensuredirs(dirname, self.createmode)
281 util.ensuredirs(dirname, self.createmode)
279 if nlink > 0:
282 if nlink > 0:
280 if self._trustnlink is None:
283 if self._trustnlink is None:
281 self._trustnlink = nlink > 1 or util.checknlink(f)
284 self._trustnlink = nlink > 1 or util.checknlink(f)
282 if nlink > 1 or not self._trustnlink:
285 if nlink > 1 or not self._trustnlink:
283 util.rename(util.mktempcopy(f), f)
286 util.rename(util.mktempcopy(f), f)
284 fp = util.posixfile(f, mode)
287 fp = util.posixfile(f, mode)
285 if nlink == 0:
288 if nlink == 0:
286 self._fixfilemode(f)
289 self._fixfilemode(f)
287 return fp
290 return fp
288
291
289 def symlink(self, src, dst):
292 def symlink(self, src, dst):
290 self.audit(dst)
293 self.audit(dst)
291 linkname = self.join(dst)
294 linkname = self.join(dst)
292 try:
295 try:
293 os.unlink(linkname)
296 os.unlink(linkname)
294 except OSError:
297 except OSError:
295 pass
298 pass
296
299
297 util.ensuredirs(os.path.dirname(linkname), self.createmode)
300 util.ensuredirs(os.path.dirname(linkname), self.createmode)
298
301
299 if self._cansymlink:
302 if self._cansymlink:
300 try:
303 try:
301 os.symlink(src, linkname)
304 os.symlink(src, linkname)
302 except OSError, err:
305 except OSError, err:
303 raise OSError(err.errno, _('could not symlink to %r: %s') %
306 raise OSError(err.errno, _('could not symlink to %r: %s') %
304 (src, err.strerror), linkname)
307 (src, err.strerror), linkname)
305 else:
308 else:
306 self.write(dst, src)
309 self.write(dst, src)
307
310
308 def join(self, path):
311 def join(self, path):
309 if path:
312 if path:
310 return os.path.join(self.base, path)
313 return os.path.join(self.base, path)
311 else:
314 else:
312 return self.base
315 return self.base
313
316
314 opener = vfs
317 opener = vfs
315
318
316 class auditvfs(object):
319 class auditvfs(object):
317 def __init__(self, vfs):
320 def __init__(self, vfs):
318 self.vfs = vfs
321 self.vfs = vfs
319
322
320 def _getmustaudit(self):
323 def _getmustaudit(self):
321 return self.vfs.mustaudit
324 return self.vfs.mustaudit
322
325
323 def _setmustaudit(self, onoff):
326 def _setmustaudit(self, onoff):
324 self.vfs.mustaudit = onoff
327 self.vfs.mustaudit = onoff
325
328
326 mustaudit = property(_getmustaudit, _setmustaudit)
329 mustaudit = property(_getmustaudit, _setmustaudit)
327
330
328 class filtervfs(abstractvfs, auditvfs):
331 class filtervfs(abstractvfs, auditvfs):
329 '''Wrapper vfs for filtering filenames with a function.'''
332 '''Wrapper vfs for filtering filenames with a function.'''
330
333
331 def __init__(self, vfs, filter):
334 def __init__(self, vfs, filter):
332 auditvfs.__init__(self, vfs)
335 auditvfs.__init__(self, vfs)
333 self._filter = filter
336 self._filter = filter
334
337
335 def __call__(self, path, *args, **kwargs):
338 def __call__(self, path, *args, **kwargs):
336 return self.vfs(self._filter(path), *args, **kwargs)
339 return self.vfs(self._filter(path), *args, **kwargs)
337
340
338 def join(self, path):
341 def join(self, path):
339 if path:
342 if path:
340 return self.vfs.join(self._filter(path))
343 return self.vfs.join(self._filter(path))
341 else:
344 else:
342 return self.vfs.join(path)
345 return self.vfs.join(path)
343
346
344 filteropener = filtervfs
347 filteropener = filtervfs
345
348
346 class readonlyvfs(abstractvfs, auditvfs):
349 class readonlyvfs(abstractvfs, auditvfs):
347 '''Wrapper vfs preventing any writing.'''
350 '''Wrapper vfs preventing any writing.'''
348
351
349 def __init__(self, vfs):
352 def __init__(self, vfs):
350 auditvfs.__init__(self, vfs)
353 auditvfs.__init__(self, vfs)
351
354
352 def __call__(self, path, mode='r', *args, **kw):
355 def __call__(self, path, mode='r', *args, **kw):
353 if mode not in ('r', 'rb'):
356 if mode not in ('r', 'rb'):
354 raise util.Abort('this vfs is read only')
357 raise util.Abort('this vfs is read only')
355 return self.vfs(path, mode, *args, **kw)
358 return self.vfs(path, mode, *args, **kw)
356
359
357
360
358 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
361 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
359 '''yield every hg repository under path, always recursively.
362 '''yield every hg repository under path, always recursively.
360 The recurse flag will only control recursion into repo working dirs'''
363 The recurse flag will only control recursion into repo working dirs'''
361 def errhandler(err):
364 def errhandler(err):
362 if err.filename == path:
365 if err.filename == path:
363 raise err
366 raise err
364 samestat = getattr(os.path, 'samestat', None)
367 samestat = getattr(os.path, 'samestat', None)
365 if followsym and samestat is not None:
368 if followsym and samestat is not None:
366 def adddir(dirlst, dirname):
369 def adddir(dirlst, dirname):
367 match = False
370 match = False
368 dirstat = os.stat(dirname)
371 dirstat = os.stat(dirname)
369 for lstdirstat in dirlst:
372 for lstdirstat in dirlst:
370 if samestat(dirstat, lstdirstat):
373 if samestat(dirstat, lstdirstat):
371 match = True
374 match = True
372 break
375 break
373 if not match:
376 if not match:
374 dirlst.append(dirstat)
377 dirlst.append(dirstat)
375 return not match
378 return not match
376 else:
379 else:
377 followsym = False
380 followsym = False
378
381
379 if (seen_dirs is None) and followsym:
382 if (seen_dirs is None) and followsym:
380 seen_dirs = []
383 seen_dirs = []
381 adddir(seen_dirs, path)
384 adddir(seen_dirs, path)
382 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
385 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
383 dirs.sort()
386 dirs.sort()
384 if '.hg' in dirs:
387 if '.hg' in dirs:
385 yield root # found a repository
388 yield root # found a repository
386 qroot = os.path.join(root, '.hg', 'patches')
389 qroot = os.path.join(root, '.hg', 'patches')
387 if os.path.isdir(os.path.join(qroot, '.hg')):
390 if os.path.isdir(os.path.join(qroot, '.hg')):
388 yield qroot # we have a patch queue repo here
391 yield qroot # we have a patch queue repo here
389 if recurse:
392 if recurse:
390 # avoid recursing inside the .hg directory
393 # avoid recursing inside the .hg directory
391 dirs.remove('.hg')
394 dirs.remove('.hg')
392 else:
395 else:
393 dirs[:] = [] # don't descend further
396 dirs[:] = [] # don't descend further
394 elif followsym:
397 elif followsym:
395 newdirs = []
398 newdirs = []
396 for d in dirs:
399 for d in dirs:
397 fname = os.path.join(root, d)
400 fname = os.path.join(root, d)
398 if adddir(seen_dirs, fname):
401 if adddir(seen_dirs, fname):
399 if os.path.islink(fname):
402 if os.path.islink(fname):
400 for hgname in walkrepos(fname, True, seen_dirs):
403 for hgname in walkrepos(fname, True, seen_dirs):
401 yield hgname
404 yield hgname
402 else:
405 else:
403 newdirs.append(d)
406 newdirs.append(d)
404 dirs[:] = newdirs
407 dirs[:] = newdirs
405
408
406 def osrcpath():
409 def osrcpath():
407 '''return default os-specific hgrc search path'''
410 '''return default os-specific hgrc search path'''
408 path = systemrcpath()
411 path = systemrcpath()
409 path.extend(userrcpath())
412 path.extend(userrcpath())
410 path = [os.path.normpath(f) for f in path]
413 path = [os.path.normpath(f) for f in path]
411 return path
414 return path
412
415
413 _rcpath = None
416 _rcpath = None
414
417
415 def rcpath():
418 def rcpath():
416 '''return hgrc search path. if env var HGRCPATH is set, use it.
419 '''return hgrc search path. if env var HGRCPATH is set, use it.
417 for each item in path, if directory, use files ending in .rc,
420 for each item in path, if directory, use files ending in .rc,
418 else use item.
421 else use item.
419 make HGRCPATH empty to only look in .hg/hgrc of current repo.
422 make HGRCPATH empty to only look in .hg/hgrc of current repo.
420 if no HGRCPATH, use default os-specific path.'''
423 if no HGRCPATH, use default os-specific path.'''
421 global _rcpath
424 global _rcpath
422 if _rcpath is None:
425 if _rcpath is None:
423 if 'HGRCPATH' in os.environ:
426 if 'HGRCPATH' in os.environ:
424 _rcpath = []
427 _rcpath = []
425 for p in os.environ['HGRCPATH'].split(os.pathsep):
428 for p in os.environ['HGRCPATH'].split(os.pathsep):
426 if not p:
429 if not p:
427 continue
430 continue
428 p = util.expandpath(p)
431 p = util.expandpath(p)
429 if os.path.isdir(p):
432 if os.path.isdir(p):
430 for f, kind in osutil.listdir(p):
433 for f, kind in osutil.listdir(p):
431 if f.endswith('.rc'):
434 if f.endswith('.rc'):
432 _rcpath.append(os.path.join(p, f))
435 _rcpath.append(os.path.join(p, f))
433 else:
436 else:
434 _rcpath.append(p)
437 _rcpath.append(p)
435 else:
438 else:
436 _rcpath = osrcpath()
439 _rcpath = osrcpath()
437 return _rcpath
440 return _rcpath
438
441
439 def revsingle(repo, revspec, default='.'):
442 def revsingle(repo, revspec, default='.'):
440 if not revspec and revspec != 0:
443 if not revspec and revspec != 0:
441 return repo[default]
444 return repo[default]
442
445
443 l = revrange(repo, [revspec])
446 l = revrange(repo, [revspec])
444 if len(l) < 1:
447 if len(l) < 1:
445 raise util.Abort(_('empty revision set'))
448 raise util.Abort(_('empty revision set'))
446 return repo[l[-1]]
449 return repo[l[-1]]
447
450
448 def revpair(repo, revs):
451 def revpair(repo, revs):
449 if not revs:
452 if not revs:
450 return repo.dirstate.p1(), None
453 return repo.dirstate.p1(), None
451
454
452 l = revrange(repo, revs)
455 l = revrange(repo, revs)
453
456
454 if len(l) == 0:
457 if len(l) == 0:
455 if revs:
458 if revs:
456 raise util.Abort(_('empty revision range'))
459 raise util.Abort(_('empty revision range'))
457 return repo.dirstate.p1(), None
460 return repo.dirstate.p1(), None
458
461
459 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
462 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
460 return repo.lookup(l[0]), None
463 return repo.lookup(l[0]), None
461
464
462 return repo.lookup(l[0]), repo.lookup(l[-1])
465 return repo.lookup(l[0]), repo.lookup(l[-1])
463
466
464 _revrangesep = ':'
467 _revrangesep = ':'
465
468
466 def revrange(repo, revs):
469 def revrange(repo, revs):
467 """Yield revision as strings from a list of revision specifications."""
470 """Yield revision as strings from a list of revision specifications."""
468
471
469 def revfix(repo, val, defval):
472 def revfix(repo, val, defval):
470 if not val and val != 0 and defval is not None:
473 if not val and val != 0 and defval is not None:
471 return defval
474 return defval
472 return repo[val].rev()
475 return repo[val].rev()
473
476
474 seen, l = set(), []
477 seen, l = set(), []
475 for spec in revs:
478 for spec in revs:
476 if l and not seen:
479 if l and not seen:
477 seen = set(l)
480 seen = set(l)
478 # attempt to parse old-style ranges first to deal with
481 # attempt to parse old-style ranges first to deal with
479 # things like old-tag which contain query metacharacters
482 # things like old-tag which contain query metacharacters
480 try:
483 try:
481 if isinstance(spec, int):
484 if isinstance(spec, int):
482 seen.add(spec)
485 seen.add(spec)
483 l.append(spec)
486 l.append(spec)
484 continue
487 continue
485
488
486 if _revrangesep in spec:
489 if _revrangesep in spec:
487 start, end = spec.split(_revrangesep, 1)
490 start, end = spec.split(_revrangesep, 1)
488 start = revfix(repo, start, 0)
491 start = revfix(repo, start, 0)
489 end = revfix(repo, end, len(repo) - 1)
492 end = revfix(repo, end, len(repo) - 1)
490 if end == nullrev and start <= 0:
493 if end == nullrev and start <= 0:
491 start = nullrev
494 start = nullrev
492 rangeiter = repo.changelog.revs(start, end)
495 rangeiter = repo.changelog.revs(start, end)
493 if not seen and not l:
496 if not seen and not l:
494 # by far the most common case: revs = ["-1:0"]
497 # by far the most common case: revs = ["-1:0"]
495 l = list(rangeiter)
498 l = list(rangeiter)
496 # defer syncing seen until next iteration
499 # defer syncing seen until next iteration
497 continue
500 continue
498 newrevs = set(rangeiter)
501 newrevs = set(rangeiter)
499 if seen:
502 if seen:
500 newrevs.difference_update(seen)
503 newrevs.difference_update(seen)
501 seen.update(newrevs)
504 seen.update(newrevs)
502 else:
505 else:
503 seen = newrevs
506 seen = newrevs
504 l.extend(sorted(newrevs, reverse=start > end))
507 l.extend(sorted(newrevs, reverse=start > end))
505 continue
508 continue
506 elif spec and spec in repo: # single unquoted rev
509 elif spec and spec in repo: # single unquoted rev
507 rev = revfix(repo, spec, None)
510 rev = revfix(repo, spec, None)
508 if rev in seen:
511 if rev in seen:
509 continue
512 continue
510 seen.add(rev)
513 seen.add(rev)
511 l.append(rev)
514 l.append(rev)
512 continue
515 continue
513 except error.RepoLookupError:
516 except error.RepoLookupError:
514 pass
517 pass
515
518
516 # fall through to new-style queries if old-style fails
519 # fall through to new-style queries if old-style fails
517 m = revset.match(repo.ui, spec)
520 m = revset.match(repo.ui, spec)
518 dl = [r for r in m(repo, list(repo)) if r not in seen]
521 dl = [r for r in m(repo, list(repo)) if r not in seen]
519 l.extend(dl)
522 l.extend(dl)
520 seen.update(dl)
523 seen.update(dl)
521
524
522 return l
525 return l
523
526
524 def expandpats(pats):
527 def expandpats(pats):
525 if not util.expandglobs:
528 if not util.expandglobs:
526 return list(pats)
529 return list(pats)
527 ret = []
530 ret = []
528 for p in pats:
531 for p in pats:
529 kind, name = matchmod._patsplit(p, None)
532 kind, name = matchmod._patsplit(p, None)
530 if kind is None:
533 if kind is None:
531 try:
534 try:
532 globbed = glob.glob(name)
535 globbed = glob.glob(name)
533 except re.error:
536 except re.error:
534 globbed = [name]
537 globbed = [name]
535 if globbed:
538 if globbed:
536 ret.extend(globbed)
539 ret.extend(globbed)
537 continue
540 continue
538 ret.append(p)
541 ret.append(p)
539 return ret
542 return ret
540
543
541 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
544 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
542 if pats == ("",):
545 if pats == ("",):
543 pats = []
546 pats = []
544 if not globbed and default == 'relpath':
547 if not globbed and default == 'relpath':
545 pats = expandpats(pats or [])
548 pats = expandpats(pats or [])
546
549
547 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
550 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
548 default)
551 default)
549 def badfn(f, msg):
552 def badfn(f, msg):
550 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
553 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
551 m.bad = badfn
554 m.bad = badfn
552 return m, pats
555 return m, pats
553
556
554 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
557 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
555 return matchandpats(ctx, pats, opts, globbed, default)[0]
558 return matchandpats(ctx, pats, opts, globbed, default)[0]
556
559
557 def matchall(repo):
560 def matchall(repo):
558 return matchmod.always(repo.root, repo.getcwd())
561 return matchmod.always(repo.root, repo.getcwd())
559
562
560 def matchfiles(repo, files):
563 def matchfiles(repo, files):
561 return matchmod.exact(repo.root, repo.getcwd(), files)
564 return matchmod.exact(repo.root, repo.getcwd(), files)
562
565
563 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
566 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
564 if dry_run is None:
567 if dry_run is None:
565 dry_run = opts.get('dry_run')
568 dry_run = opts.get('dry_run')
566 if similarity is None:
569 if similarity is None:
567 similarity = float(opts.get('similarity') or 0)
570 similarity = float(opts.get('similarity') or 0)
568 # we'd use status here, except handling of symlinks and ignore is tricky
571 # we'd use status here, except handling of symlinks and ignore is tricky
569 m = match(repo[None], pats, opts)
572 m = match(repo[None], pats, opts)
570 rejected = []
573 rejected = []
571 m.bad = lambda x, y: rejected.append(x)
574 m.bad = lambda x, y: rejected.append(x)
572
575
573 added, unknown, deleted, removed = _interestingfiles(repo, m)
576 added, unknown, deleted, removed = _interestingfiles(repo, m)
574
577
575 unknownset = set(unknown)
578 unknownset = set(unknown)
576 toprint = unknownset.copy()
579 toprint = unknownset.copy()
577 toprint.update(deleted)
580 toprint.update(deleted)
578 for abs in sorted(toprint):
581 for abs in sorted(toprint):
579 if repo.ui.verbose or not m.exact(abs):
582 if repo.ui.verbose or not m.exact(abs):
580 rel = m.rel(abs)
583 rel = m.rel(abs)
581 if abs in unknownset:
584 if abs in unknownset:
582 status = _('adding %s\n') % ((pats and rel) or abs)
585 status = _('adding %s\n') % ((pats and rel) or abs)
583 else:
586 else:
584 status = _('removing %s\n') % ((pats and rel) or abs)
587 status = _('removing %s\n') % ((pats and rel) or abs)
585 repo.ui.status(status)
588 repo.ui.status(status)
586
589
587 renames = _findrenames(repo, m, added + unknown, removed + deleted,
590 renames = _findrenames(repo, m, added + unknown, removed + deleted,
588 similarity)
591 similarity)
589
592
590 if not dry_run:
593 if not dry_run:
591 _markchanges(repo, unknown, deleted, renames)
594 _markchanges(repo, unknown, deleted, renames)
592
595
593 for f in rejected:
596 for f in rejected:
594 if f in m.files():
597 if f in m.files():
595 return 1
598 return 1
596 return 0
599 return 0
597
600
598 def marktouched(repo, files, similarity=0.0):
601 def marktouched(repo, files, similarity=0.0):
599 '''Assert that files have somehow been operated upon. files are relative to
602 '''Assert that files have somehow been operated upon. files are relative to
600 the repo root.'''
603 the repo root.'''
601 m = matchfiles(repo, files)
604 m = matchfiles(repo, files)
602 rejected = []
605 rejected = []
603 m.bad = lambda x, y: rejected.append(x)
606 m.bad = lambda x, y: rejected.append(x)
604
607
605 added, unknown, deleted, removed = _interestingfiles(repo, m)
608 added, unknown, deleted, removed = _interestingfiles(repo, m)
606
609
607 if repo.ui.verbose:
610 if repo.ui.verbose:
608 unknownset = set(unknown)
611 unknownset = set(unknown)
609 toprint = unknownset.copy()
612 toprint = unknownset.copy()
610 toprint.update(deleted)
613 toprint.update(deleted)
611 for abs in sorted(toprint):
614 for abs in sorted(toprint):
612 if abs in unknownset:
615 if abs in unknownset:
613 status = _('adding %s\n') % abs
616 status = _('adding %s\n') % abs
614 else:
617 else:
615 status = _('removing %s\n') % abs
618 status = _('removing %s\n') % abs
616 repo.ui.status(status)
619 repo.ui.status(status)
617
620
618 renames = _findrenames(repo, m, added + unknown, removed + deleted,
621 renames = _findrenames(repo, m, added + unknown, removed + deleted,
619 similarity)
622 similarity)
620
623
621 _markchanges(repo, unknown, deleted, renames)
624 _markchanges(repo, unknown, deleted, renames)
622
625
623 for f in rejected:
626 for f in rejected:
624 if f in m.files():
627 if f in m.files():
625 return 1
628 return 1
626 return 0
629 return 0
627
630
628 def _interestingfiles(repo, matcher):
631 def _interestingfiles(repo, matcher):
629 '''Walk dirstate with matcher, looking for files that addremove would care
632 '''Walk dirstate with matcher, looking for files that addremove would care
630 about.
633 about.
631
634
632 This is different from dirstate.status because it doesn't care about
635 This is different from dirstate.status because it doesn't care about
633 whether files are modified or clean.'''
636 whether files are modified or clean.'''
634 added, unknown, deleted, removed = [], [], [], []
637 added, unknown, deleted, removed = [], [], [], []
635 audit_path = pathutil.pathauditor(repo.root)
638 audit_path = pathutil.pathauditor(repo.root)
636
639
637 ctx = repo[None]
640 ctx = repo[None]
638 dirstate = repo.dirstate
641 dirstate = repo.dirstate
639 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
642 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
640 full=False)
643 full=False)
641 for abs, st in walkresults.iteritems():
644 for abs, st in walkresults.iteritems():
642 dstate = dirstate[abs]
645 dstate = dirstate[abs]
643 if dstate == '?' and audit_path.check(abs):
646 if dstate == '?' and audit_path.check(abs):
644 unknown.append(abs)
647 unknown.append(abs)
645 elif dstate != 'r' and not st:
648 elif dstate != 'r' and not st:
646 deleted.append(abs)
649 deleted.append(abs)
647 # for finding renames
650 # for finding renames
648 elif dstate == 'r':
651 elif dstate == 'r':
649 removed.append(abs)
652 removed.append(abs)
650 elif dstate == 'a':
653 elif dstate == 'a':
651 added.append(abs)
654 added.append(abs)
652
655
653 return added, unknown, deleted, removed
656 return added, unknown, deleted, removed
654
657
655 def _findrenames(repo, matcher, added, removed, similarity):
658 def _findrenames(repo, matcher, added, removed, similarity):
656 '''Find renames from removed files to added ones.'''
659 '''Find renames from removed files to added ones.'''
657 renames = {}
660 renames = {}
658 if similarity > 0:
661 if similarity > 0:
659 for old, new, score in similar.findrenames(repo, added, removed,
662 for old, new, score in similar.findrenames(repo, added, removed,
660 similarity):
663 similarity):
661 if (repo.ui.verbose or not matcher.exact(old)
664 if (repo.ui.verbose or not matcher.exact(old)
662 or not matcher.exact(new)):
665 or not matcher.exact(new)):
663 repo.ui.status(_('recording removal of %s as rename to %s '
666 repo.ui.status(_('recording removal of %s as rename to %s '
664 '(%d%% similar)\n') %
667 '(%d%% similar)\n') %
665 (matcher.rel(old), matcher.rel(new),
668 (matcher.rel(old), matcher.rel(new),
666 score * 100))
669 score * 100))
667 renames[new] = old
670 renames[new] = old
668 return renames
671 return renames
669
672
670 def _markchanges(repo, unknown, deleted, renames):
673 def _markchanges(repo, unknown, deleted, renames):
671 '''Marks the files in unknown as added, the files in deleted as removed,
674 '''Marks the files in unknown as added, the files in deleted as removed,
672 and the files in renames as copied.'''
675 and the files in renames as copied.'''
673 wctx = repo[None]
676 wctx = repo[None]
674 wlock = repo.wlock()
677 wlock = repo.wlock()
675 try:
678 try:
676 wctx.forget(deleted)
679 wctx.forget(deleted)
677 wctx.add(unknown)
680 wctx.add(unknown)
678 for new, old in renames.iteritems():
681 for new, old in renames.iteritems():
679 wctx.copy(old, new)
682 wctx.copy(old, new)
680 finally:
683 finally:
681 wlock.release()
684 wlock.release()
682
685
683 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
686 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
684 """Update the dirstate to reflect the intent of copying src to dst. For
687 """Update the dirstate to reflect the intent of copying src to dst. For
685 different reasons it might not end with dst being marked as copied from src.
688 different reasons it might not end with dst being marked as copied from src.
686 """
689 """
687 origsrc = repo.dirstate.copied(src) or src
690 origsrc = repo.dirstate.copied(src) or src
688 if dst == origsrc: # copying back a copy?
691 if dst == origsrc: # copying back a copy?
689 if repo.dirstate[dst] not in 'mn' and not dryrun:
692 if repo.dirstate[dst] not in 'mn' and not dryrun:
690 repo.dirstate.normallookup(dst)
693 repo.dirstate.normallookup(dst)
691 else:
694 else:
692 if repo.dirstate[origsrc] == 'a' and origsrc == src:
695 if repo.dirstate[origsrc] == 'a' and origsrc == src:
693 if not ui.quiet:
696 if not ui.quiet:
694 ui.warn(_("%s has not been committed yet, so no copy "
697 ui.warn(_("%s has not been committed yet, so no copy "
695 "data will be stored for %s.\n")
698 "data will be stored for %s.\n")
696 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
699 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
697 if repo.dirstate[dst] in '?r' and not dryrun:
700 if repo.dirstate[dst] in '?r' and not dryrun:
698 wctx.add([dst])
701 wctx.add([dst])
699 elif not dryrun:
702 elif not dryrun:
700 wctx.copy(origsrc, dst)
703 wctx.copy(origsrc, dst)
701
704
702 def readrequires(opener, supported):
705 def readrequires(opener, supported):
703 '''Reads and parses .hg/requires and checks if all entries found
706 '''Reads and parses .hg/requires and checks if all entries found
704 are in the list of supported features.'''
707 are in the list of supported features.'''
705 requirements = set(opener.read("requires").splitlines())
708 requirements = set(opener.read("requires").splitlines())
706 missings = []
709 missings = []
707 for r in requirements:
710 for r in requirements:
708 if r not in supported:
711 if r not in supported:
709 if not r or not r[0].isalnum():
712 if not r or not r[0].isalnum():
710 raise error.RequirementError(_(".hg/requires file is corrupt"))
713 raise error.RequirementError(_(".hg/requires file is corrupt"))
711 missings.append(r)
714 missings.append(r)
712 missings.sort()
715 missings.sort()
713 if missings:
716 if missings:
714 raise error.RequirementError(
717 raise error.RequirementError(
715 _("unknown repository format: requires features '%s' (upgrade "
718 _("unknown repository format: requires features '%s' (upgrade "
716 "Mercurial)") % "', '".join(missings))
719 "Mercurial)") % "', '".join(missings))
717 return requirements
720 return requirements
718
721
719 class filecachesubentry(object):
722 class filecachesubentry(object):
720 def __init__(self, path, stat):
723 def __init__(self, path, stat):
721 self.path = path
724 self.path = path
722 self.cachestat = None
725 self.cachestat = None
723 self._cacheable = None
726 self._cacheable = None
724
727
725 if stat:
728 if stat:
726 self.cachestat = filecachesubentry.stat(self.path)
729 self.cachestat = filecachesubentry.stat(self.path)
727
730
728 if self.cachestat:
731 if self.cachestat:
729 self._cacheable = self.cachestat.cacheable()
732 self._cacheable = self.cachestat.cacheable()
730 else:
733 else:
731 # None means we don't know yet
734 # None means we don't know yet
732 self._cacheable = None
735 self._cacheable = None
733
736
734 def refresh(self):
737 def refresh(self):
735 if self.cacheable():
738 if self.cacheable():
736 self.cachestat = filecachesubentry.stat(self.path)
739 self.cachestat = filecachesubentry.stat(self.path)
737
740
738 def cacheable(self):
741 def cacheable(self):
739 if self._cacheable is not None:
742 if self._cacheable is not None:
740 return self._cacheable
743 return self._cacheable
741
744
742 # we don't know yet, assume it is for now
745 # we don't know yet, assume it is for now
743 return True
746 return True
744
747
745 def changed(self):
748 def changed(self):
746 # no point in going further if we can't cache it
749 # no point in going further if we can't cache it
747 if not self.cacheable():
750 if not self.cacheable():
748 return True
751 return True
749
752
750 newstat = filecachesubentry.stat(self.path)
753 newstat = filecachesubentry.stat(self.path)
751
754
752 # we may not know if it's cacheable yet, check again now
755 # we may not know if it's cacheable yet, check again now
753 if newstat and self._cacheable is None:
756 if newstat and self._cacheable is None:
754 self._cacheable = newstat.cacheable()
757 self._cacheable = newstat.cacheable()
755
758
756 # check again
759 # check again
757 if not self._cacheable:
760 if not self._cacheable:
758 return True
761 return True
759
762
760 if self.cachestat != newstat:
763 if self.cachestat != newstat:
761 self.cachestat = newstat
764 self.cachestat = newstat
762 return True
765 return True
763 else:
766 else:
764 return False
767 return False
765
768
766 @staticmethod
769 @staticmethod
767 def stat(path):
770 def stat(path):
768 try:
771 try:
769 return util.cachestat(path)
772 return util.cachestat(path)
770 except OSError, e:
773 except OSError, e:
771 if e.errno != errno.ENOENT:
774 if e.errno != errno.ENOENT:
772 raise
775 raise
773
776
774 class filecacheentry(object):
777 class filecacheentry(object):
775 def __init__(self, paths, stat=True):
778 def __init__(self, paths, stat=True):
776 self._entries = []
779 self._entries = []
777 for path in paths:
780 for path in paths:
778 self._entries.append(filecachesubentry(path, stat))
781 self._entries.append(filecachesubentry(path, stat))
779
782
780 def changed(self):
783 def changed(self):
781 '''true if any entry has changed'''
784 '''true if any entry has changed'''
782 for entry in self._entries:
785 for entry in self._entries:
783 if entry.changed():
786 if entry.changed():
784 return True
787 return True
785 return False
788 return False
786
789
787 def refresh(self):
790 def refresh(self):
788 for entry in self._entries:
791 for entry in self._entries:
789 entry.refresh()
792 entry.refresh()
790
793
791 class filecache(object):
794 class filecache(object):
792 '''A property like decorator that tracks files under .hg/ for updates.
795 '''A property like decorator that tracks files under .hg/ for updates.
793
796
794 Records stat info when called in _filecache.
797 Records stat info when called in _filecache.
795
798
796 On subsequent calls, compares old stat info with new info, and recreates the
799 On subsequent calls, compares old stat info with new info, and recreates the
797 object when any of the files changes, updating the new stat info in
800 object when any of the files changes, updating the new stat info in
798 _filecache.
801 _filecache.
799
802
800 Mercurial either atomic renames or appends for files under .hg,
803 Mercurial either atomic renames or appends for files under .hg,
801 so to ensure the cache is reliable we need the filesystem to be able
804 so to ensure the cache is reliable we need the filesystem to be able
802 to tell us if a file has been replaced. If it can't, we fallback to
805 to tell us if a file has been replaced. If it can't, we fallback to
803 recreating the object on every call (essentially the same behaviour as
806 recreating the object on every call (essentially the same behaviour as
804 propertycache).
807 propertycache).
805
808
806 '''
809 '''
807 def __init__(self, *paths):
810 def __init__(self, *paths):
808 self.paths = paths
811 self.paths = paths
809
812
810 def join(self, obj, fname):
813 def join(self, obj, fname):
811 """Used to compute the runtime path of a cached file.
814 """Used to compute the runtime path of a cached file.
812
815
813 Users should subclass filecache and provide their own version of this
816 Users should subclass filecache and provide their own version of this
814 function to call the appropriate join function on 'obj' (an instance
817 function to call the appropriate join function on 'obj' (an instance
815 of the class that its member function was decorated).
818 of the class that its member function was decorated).
816 """
819 """
817 return obj.join(fname)
820 return obj.join(fname)
818
821
819 def __call__(self, func):
822 def __call__(self, func):
820 self.func = func
823 self.func = func
821 self.name = func.__name__
824 self.name = func.__name__
822 return self
825 return self
823
826
824 def __get__(self, obj, type=None):
827 def __get__(self, obj, type=None):
825 # do we need to check if the file changed?
828 # do we need to check if the file changed?
826 if self.name in obj.__dict__:
829 if self.name in obj.__dict__:
827 assert self.name in obj._filecache, self.name
830 assert self.name in obj._filecache, self.name
828 return obj.__dict__[self.name]
831 return obj.__dict__[self.name]
829
832
830 entry = obj._filecache.get(self.name)
833 entry = obj._filecache.get(self.name)
831
834
832 if entry:
835 if entry:
833 if entry.changed():
836 if entry.changed():
834 entry.obj = self.func(obj)
837 entry.obj = self.func(obj)
835 else:
838 else:
836 paths = [self.join(obj, path) for path in self.paths]
839 paths = [self.join(obj, path) for path in self.paths]
837
840
838 # We stat -before- creating the object so our cache doesn't lie if
841 # We stat -before- creating the object so our cache doesn't lie if
839 # a writer modified between the time we read and stat
842 # a writer modified between the time we read and stat
840 entry = filecacheentry(paths, True)
843 entry = filecacheentry(paths, True)
841 entry.obj = self.func(obj)
844 entry.obj = self.func(obj)
842
845
843 obj._filecache[self.name] = entry
846 obj._filecache[self.name] = entry
844
847
845 obj.__dict__[self.name] = entry.obj
848 obj.__dict__[self.name] = entry.obj
846 return entry.obj
849 return entry.obj
847
850
848 def __set__(self, obj, value):
851 def __set__(self, obj, value):
849 if self.name not in obj._filecache:
852 if self.name not in obj._filecache:
850 # we add an entry for the missing value because X in __dict__
853 # we add an entry for the missing value because X in __dict__
851 # implies X in _filecache
854 # implies X in _filecache
852 paths = [self.join(obj, path) for path in self.paths]
855 paths = [self.join(obj, path) for path in self.paths]
853 ce = filecacheentry(paths, False)
856 ce = filecacheentry(paths, False)
854 obj._filecache[self.name] = ce
857 obj._filecache[self.name] = ce
855 else:
858 else:
856 ce = obj._filecache[self.name]
859 ce = obj._filecache[self.name]
857
860
858 ce.obj = value # update cached copy
861 ce.obj = value # update cached copy
859 obj.__dict__[self.name] = value # update copy returned by obj.x
862 obj.__dict__[self.name] = value # update copy returned by obj.x
860
863
861 def __delete__(self, obj):
864 def __delete__(self, obj):
862 try:
865 try:
863 del obj.__dict__[self.name]
866 del obj.__dict__[self.name]
864 except KeyError:
867 except KeyError:
865 raise AttributeError(self.name)
868 raise AttributeError(self.name)
866
869
867 class dirs(object):
870 class dirs(object):
868 '''a multiset of directory names from a dirstate or manifest'''
871 '''a multiset of directory names from a dirstate or manifest'''
869
872
870 def __init__(self, map, skip=None):
873 def __init__(self, map, skip=None):
871 self._dirs = {}
874 self._dirs = {}
872 addpath = self.addpath
875 addpath = self.addpath
873 if util.safehasattr(map, 'iteritems') and skip is not None:
876 if util.safehasattr(map, 'iteritems') and skip is not None:
874 for f, s in map.iteritems():
877 for f, s in map.iteritems():
875 if s[0] != skip:
878 if s[0] != skip:
876 addpath(f)
879 addpath(f)
877 else:
880 else:
878 for f in map:
881 for f in map:
879 addpath(f)
882 addpath(f)
880
883
881 def addpath(self, path):
884 def addpath(self, path):
882 dirs = self._dirs
885 dirs = self._dirs
883 for base in finddirs(path):
886 for base in finddirs(path):
884 if base in dirs:
887 if base in dirs:
885 dirs[base] += 1
888 dirs[base] += 1
886 return
889 return
887 dirs[base] = 1
890 dirs[base] = 1
888
891
889 def delpath(self, path):
892 def delpath(self, path):
890 dirs = self._dirs
893 dirs = self._dirs
891 for base in finddirs(path):
894 for base in finddirs(path):
892 if dirs[base] > 1:
895 if dirs[base] > 1:
893 dirs[base] -= 1
896 dirs[base] -= 1
894 return
897 return
895 del dirs[base]
898 del dirs[base]
896
899
897 def __iter__(self):
900 def __iter__(self):
898 return self._dirs.iterkeys()
901 return self._dirs.iterkeys()
899
902
900 def __contains__(self, d):
903 def __contains__(self, d):
901 return d in self._dirs
904 return d in self._dirs
902
905
903 if util.safehasattr(parsers, 'dirs'):
906 if util.safehasattr(parsers, 'dirs'):
904 dirs = parsers.dirs
907 dirs = parsers.dirs
905
908
906 def finddirs(path):
909 def finddirs(path):
907 pos = path.rfind('/')
910 pos = path.rfind('/')
908 while pos != -1:
911 while pos != -1:
909 yield path[:pos]
912 yield path[:pos]
910 pos = path.rfind('/', 0, pos)
913 pos = path.rfind('/', 0, pos)
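The filecache docstring in the listing above explains when the cached object is rebuilt, but the contract it assumes is easy to miss: the decorated object must provide a join() method (used by filecache.join() to locate the tracked file) and a _filecache dict (where the stat entries are stored). A minimal sketch of that contract, with an invented class and file name (inside Mercurial the main user is localrepository):

    import os
    from mercurial import scmutil

    class configholder(object):
        '''toy stand-in for a class decorated with filecache'''
        def __init__(self, root):
            self.root = root
            self._filecache = {}      # filecache.__get__/__set__ keep their entries here

        def join(self, fname):
            # filecache.join() calls obj.join(fname) to compute the tracked path
            return os.path.join(self.root, fname)

        @scmutil.filecache('someconfig')
        def someconfig(self):
            # recomputed only when the stat info of <root>/someconfig changes
            try:
                fp = open(self.join('someconfig'))
                try:
                    return fp.read()
                finally:
                    fp.close()
            except IOError:
                return ''

The first attribute access reads and stats the file; later accesses return the cached value until the tracked file is replaced or appended to, matching the atomic-rename/append pattern the docstring describes.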