vfs: add "makelock()" and "readlock()"
FUJIWARA Katsunori
r20090:88d8e568 (default branch)
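
This changeset adds two thin wrappers to abstractvfs, makelock() and readlock(), so lock files can be created and inspected through a vfs using paths relative to its base, instead of callers joining paths by hand before calling util.makelock() and util.readlock(). The sketch below is illustrative and not part of the patch: the repository path, lock name, and lock contents are hypothetical, and it assumes the Python 2 era API visible in the diff (scmutil.vfs, and util.makelock signalling an existing lock with EEXIST).

import errno
from mercurial import scmutil

# hypothetical .hg directory; scmutil.vfs is the class defined in the file below
vfs = scmutil.vfs('/tmp/repo/.hg')

try:
    # new in this changeset: wraps util.makelock(info, vfs.join('wlock'))
    vfs.makelock('example.com:12345', 'wlock')
except (OSError, IOError), inst:
    if inst.errno != errno.EEXIST:
        raise
    # lock already exists; the new readlock() wraps util.readlock(vfs.join('wlock'))
    print 'lock held by %s' % vfs.readlock('wlock')

The diff of mercurial/scmutil.py follows.
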
@@ -1,913 +1,919 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 def nochangesfound(ui, repo, excluded=None):
23 def nochangesfound(ui, repo, excluded=None):
24 '''Report no changes for push/pull, excluded is None or a list of
24 '''Report no changes for push/pull, excluded is None or a list of
25 nodes excluded from the push/pull.
25 nodes excluded from the push/pull.
26 '''
26 '''
27 secretlist = []
27 secretlist = []
28 if excluded:
28 if excluded:
29 for n in excluded:
29 for n in excluded:
30 if n not in repo:
30 if n not in repo:
31 # discovery should not have included the filtered revision,
31 # discovery should not have included the filtered revision,
32 # we have to explicitly exclude it until discovery is cleanup.
32 # we have to explicitly exclude it until discovery is cleanup.
33 continue
33 continue
34 ctx = repo[n]
34 ctx = repo[n]
35 if ctx.phase() >= phases.secret and not ctx.extinct():
35 if ctx.phase() >= phases.secret and not ctx.extinct():
36 secretlist.append(n)
36 secretlist.append(n)
37
37
38 if secretlist:
38 if secretlist:
39 ui.status(_("no changes found (ignored %d secret changesets)\n")
39 ui.status(_("no changes found (ignored %d secret changesets)\n")
40 % len(secretlist))
40 % len(secretlist))
41 else:
41 else:
42 ui.status(_("no changes found\n"))
42 ui.status(_("no changes found\n"))
43
43
44 def checknewlabel(repo, lbl, kind):
44 def checknewlabel(repo, lbl, kind):
45 # Do not use the "kind" parameter in ui output.
45 # Do not use the "kind" parameter in ui output.
46 # It makes strings difficult to translate.
46 # It makes strings difficult to translate.
47 if lbl in ['tip', '.', 'null']:
47 if lbl in ['tip', '.', 'null']:
48 raise util.Abort(_("the name '%s' is reserved") % lbl)
48 raise util.Abort(_("the name '%s' is reserved") % lbl)
49 for c in (':', '\0', '\n', '\r'):
49 for c in (':', '\0', '\n', '\r'):
50 if c in lbl:
50 if c in lbl:
51 raise util.Abort(_("%r cannot be used in a name") % c)
51 raise util.Abort(_("%r cannot be used in a name") % c)
52 try:
52 try:
53 int(lbl)
53 int(lbl)
54 raise util.Abort(_("cannot use an integer as a name"))
54 raise util.Abort(_("cannot use an integer as a name"))
55 except ValueError:
55 except ValueError:
56 pass
56 pass
57
57
58 def checkfilename(f):
58 def checkfilename(f):
59 '''Check that the filename f is an acceptable filename for a tracked file'''
59 '''Check that the filename f is an acceptable filename for a tracked file'''
60 if '\r' in f or '\n' in f:
60 if '\r' in f or '\n' in f:
61 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
61 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
62
62
63 def checkportable(ui, f):
63 def checkportable(ui, f):
64 '''Check if filename f is portable and warn or abort depending on config'''
64 '''Check if filename f is portable and warn or abort depending on config'''
65 checkfilename(f)
65 checkfilename(f)
66 abort, warn = checkportabilityalert(ui)
66 abort, warn = checkportabilityalert(ui)
67 if abort or warn:
67 if abort or warn:
68 msg = util.checkwinfilename(f)
68 msg = util.checkwinfilename(f)
69 if msg:
69 if msg:
70 msg = "%s: %r" % (msg, f)
70 msg = "%s: %r" % (msg, f)
71 if abort:
71 if abort:
72 raise util.Abort(msg)
72 raise util.Abort(msg)
73 ui.warn(_("warning: %s\n") % msg)
73 ui.warn(_("warning: %s\n") % msg)
74
74
75 def checkportabilityalert(ui):
75 def checkportabilityalert(ui):
76 '''check if the user's config requests nothing, a warning, or abort for
76 '''check if the user's config requests nothing, a warning, or abort for
77 non-portable filenames'''
77 non-portable filenames'''
78 val = ui.config('ui', 'portablefilenames', 'warn')
78 val = ui.config('ui', 'portablefilenames', 'warn')
79 lval = val.lower()
79 lval = val.lower()
80 bval = util.parsebool(val)
80 bval = util.parsebool(val)
81 abort = os.name == 'nt' or lval == 'abort'
81 abort = os.name == 'nt' or lval == 'abort'
82 warn = bval or lval == 'warn'
82 warn = bval or lval == 'warn'
83 if bval is None and not (warn or abort or lval == 'ignore'):
83 if bval is None and not (warn or abort or lval == 'ignore'):
84 raise error.ConfigError(
84 raise error.ConfigError(
85 _("ui.portablefilenames value is invalid ('%s')") % val)
85 _("ui.portablefilenames value is invalid ('%s')") % val)
86 return abort, warn
86 return abort, warn
87
87
88 class casecollisionauditor(object):
88 class casecollisionauditor(object):
89 def __init__(self, ui, abort, dirstate):
89 def __init__(self, ui, abort, dirstate):
90 self._ui = ui
90 self._ui = ui
91 self._abort = abort
91 self._abort = abort
92 allfiles = '\0'.join(dirstate._map)
92 allfiles = '\0'.join(dirstate._map)
93 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
93 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
94 self._dirstate = dirstate
94 self._dirstate = dirstate
95 # The purpose of _newfiles is so that we don't complain about
95 # The purpose of _newfiles is so that we don't complain about
96 # case collisions if someone were to call this object with the
96 # case collisions if someone were to call this object with the
97 # same filename twice.
97 # same filename twice.
98 self._newfiles = set()
98 self._newfiles = set()
99
99
100 def __call__(self, f):
100 def __call__(self, f):
101 if f in self._newfiles:
101 if f in self._newfiles:
102 return
102 return
103 fl = encoding.lower(f)
103 fl = encoding.lower(f)
104 if fl in self._loweredfiles and f not in self._dirstate:
104 if fl in self._loweredfiles and f not in self._dirstate:
105 msg = _('possible case-folding collision for %s') % f
105 msg = _('possible case-folding collision for %s') % f
106 if self._abort:
106 if self._abort:
107 raise util.Abort(msg)
107 raise util.Abort(msg)
108 self._ui.warn(_("warning: %s\n") % msg)
108 self._ui.warn(_("warning: %s\n") % msg)
109 self._loweredfiles.add(fl)
109 self._loweredfiles.add(fl)
110 self._newfiles.add(f)
110 self._newfiles.add(f)
111
111
112 class abstractvfs(object):
112 class abstractvfs(object):
113 """Abstract base class; cannot be instantiated"""
113 """Abstract base class; cannot be instantiated"""
114
114
115 def __init__(self, *args, **kwargs):
115 def __init__(self, *args, **kwargs):
116 '''Prevent instantiation; don't call this from subclasses.'''
116 '''Prevent instantiation; don't call this from subclasses.'''
117 raise NotImplementedError('attempted instantiating ' + str(type(self)))
117 raise NotImplementedError('attempted instantiating ' + str(type(self)))
118
118
119 def tryread(self, path):
119 def tryread(self, path):
120 '''gracefully return an empty string for missing files'''
120 '''gracefully return an empty string for missing files'''
121 try:
121 try:
122 return self.read(path)
122 return self.read(path)
123 except IOError, inst:
123 except IOError, inst:
124 if inst.errno != errno.ENOENT:
124 if inst.errno != errno.ENOENT:
125 raise
125 raise
126 return ""
126 return ""
127
127
128 def open(self, path, mode="r", text=False, atomictemp=False):
128 def open(self, path, mode="r", text=False, atomictemp=False):
129 self.open = self.__call__
129 self.open = self.__call__
130 return self.__call__(path, mode, text, atomictemp)
130 return self.__call__(path, mode, text, atomictemp)
131
131
132 def read(self, path):
132 def read(self, path):
133 fp = self(path, 'rb')
133 fp = self(path, 'rb')
134 try:
134 try:
135 return fp.read()
135 return fp.read()
136 finally:
136 finally:
137 fp.close()
137 fp.close()
138
138
139 def write(self, path, data):
139 def write(self, path, data):
140 fp = self(path, 'wb')
140 fp = self(path, 'wb')
141 try:
141 try:
142 return fp.write(data)
142 return fp.write(data)
143 finally:
143 finally:
144 fp.close()
144 fp.close()
145
145
146 def append(self, path, data):
146 def append(self, path, data):
147 fp = self(path, 'ab')
147 fp = self(path, 'ab')
148 try:
148 try:
149 return fp.write(data)
149 return fp.write(data)
150 finally:
150 finally:
151 fp.close()
151 fp.close()
152
152
153 def chmod(self, path, mode):
153 def chmod(self, path, mode):
154 return os.chmod(self.join(path), mode)
154 return os.chmod(self.join(path), mode)
155
155
156 def exists(self, path=None):
156 def exists(self, path=None):
157 return os.path.exists(self.join(path))
157 return os.path.exists(self.join(path))
158
158
159 def fstat(self, fp):
159 def fstat(self, fp):
160 return util.fstat(fp)
160 return util.fstat(fp)
161
161
162 def isdir(self, path=None):
162 def isdir(self, path=None):
163 return os.path.isdir(self.join(path))
163 return os.path.isdir(self.join(path))
164
164
165 def isfile(self, path=None):
165 def isfile(self, path=None):
166 return os.path.isfile(self.join(path))
166 return os.path.isfile(self.join(path))
167
167
168 def islink(self, path=None):
168 def islink(self, path=None):
169 return os.path.islink(self.join(path))
169 return os.path.islink(self.join(path))
170
170
171 def lstat(self, path=None):
171 def lstat(self, path=None):
172 return os.lstat(self.join(path))
172 return os.lstat(self.join(path))
173
173
174 def makedir(self, path=None, notindexed=True):
174 def makedir(self, path=None, notindexed=True):
175 return util.makedir(self.join(path), notindexed)
175 return util.makedir(self.join(path), notindexed)
176
176
177 def makedirs(self, path=None, mode=None):
177 def makedirs(self, path=None, mode=None):
178 return util.makedirs(self.join(path), mode)
178 return util.makedirs(self.join(path), mode)
179
179
180 def makelock(self, info, path):
181 return util.makelock(info, self.join(path))
182
180 def mkdir(self, path=None):
183 def mkdir(self, path=None):
181 return os.mkdir(self.join(path))
184 return os.mkdir(self.join(path))
182
185
183 def readdir(self, path=None, stat=None, skip=None):
186 def readdir(self, path=None, stat=None, skip=None):
184 return osutil.listdir(self.join(path), stat, skip)
187 return osutil.listdir(self.join(path), stat, skip)
185
188
189 def readlock(self, path):
190 return util.readlock(self.join(path))
191
186 def rename(self, src, dst):
192 def rename(self, src, dst):
187 return util.rename(self.join(src), self.join(dst))
193 return util.rename(self.join(src), self.join(dst))
188
194
189 def readlink(self, path):
195 def readlink(self, path):
190 return os.readlink(self.join(path))
196 return os.readlink(self.join(path))
191
197
192 def setflags(self, path, l, x):
198 def setflags(self, path, l, x):
193 return util.setflags(self.join(path), l, x)
199 return util.setflags(self.join(path), l, x)
194
200
195 def stat(self, path=None):
201 def stat(self, path=None):
196 return os.stat(self.join(path))
202 return os.stat(self.join(path))
197
203
198 def unlink(self, path=None):
204 def unlink(self, path=None):
199 return util.unlink(self.join(path))
205 return util.unlink(self.join(path))
200
206
201 def utime(self, path=None, t=None):
207 def utime(self, path=None, t=None):
202 return os.utime(self.join(path), t)
208 return os.utime(self.join(path), t)
203
209
204 class vfs(abstractvfs):
210 class vfs(abstractvfs):
205 '''Operate files relative to a base directory
211 '''Operate files relative to a base directory
206
212
207 This class is used to hide the details of COW semantics and
213 This class is used to hide the details of COW semantics and
208 remote file access from higher level code.
214 remote file access from higher level code.
209 '''
215 '''
210 def __init__(self, base, audit=True, expandpath=False, realpath=False):
216 def __init__(self, base, audit=True, expandpath=False, realpath=False):
211 if expandpath:
217 if expandpath:
212 base = util.expandpath(base)
218 base = util.expandpath(base)
213 if realpath:
219 if realpath:
214 base = os.path.realpath(base)
220 base = os.path.realpath(base)
215 self.base = base
221 self.base = base
216 self._setmustaudit(audit)
222 self._setmustaudit(audit)
217 self.createmode = None
223 self.createmode = None
218 self._trustnlink = None
224 self._trustnlink = None
219
225
220 def _getmustaudit(self):
226 def _getmustaudit(self):
221 return self._audit
227 return self._audit
222
228
223 def _setmustaudit(self, onoff):
229 def _setmustaudit(self, onoff):
224 self._audit = onoff
230 self._audit = onoff
225 if onoff:
231 if onoff:
226 self.audit = pathutil.pathauditor(self.base)
232 self.audit = pathutil.pathauditor(self.base)
227 else:
233 else:
228 self.audit = util.always
234 self.audit = util.always
229
235
230 mustaudit = property(_getmustaudit, _setmustaudit)
236 mustaudit = property(_getmustaudit, _setmustaudit)
231
237
232 @util.propertycache
238 @util.propertycache
233 def _cansymlink(self):
239 def _cansymlink(self):
234 return util.checklink(self.base)
240 return util.checklink(self.base)
235
241
236 @util.propertycache
242 @util.propertycache
237 def _chmod(self):
243 def _chmod(self):
238 return util.checkexec(self.base)
244 return util.checkexec(self.base)
239
245
240 def _fixfilemode(self, name):
246 def _fixfilemode(self, name):
241 if self.createmode is None or not self._chmod:
247 if self.createmode is None or not self._chmod:
242 return
248 return
243 os.chmod(name, self.createmode & 0666)
249 os.chmod(name, self.createmode & 0666)
244
250
245 def __call__(self, path, mode="r", text=False, atomictemp=False):
251 def __call__(self, path, mode="r", text=False, atomictemp=False):
246 if self._audit:
252 if self._audit:
247 r = util.checkosfilename(path)
253 r = util.checkosfilename(path)
248 if r:
254 if r:
249 raise util.Abort("%s: %r" % (r, path))
255 raise util.Abort("%s: %r" % (r, path))
250 self.audit(path)
256 self.audit(path)
251 f = self.join(path)
257 f = self.join(path)
252
258
253 if not text and "b" not in mode:
259 if not text and "b" not in mode:
254 mode += "b" # for that other OS
260 mode += "b" # for that other OS
255
261
256 nlink = -1
262 nlink = -1
257 if mode not in ('r', 'rb'):
263 if mode not in ('r', 'rb'):
258 dirname, basename = util.split(f)
264 dirname, basename = util.split(f)
259 # If basename is empty, then the path is malformed because it points
265 # If basename is empty, then the path is malformed because it points
260 # to a directory. Let the posixfile() call below raise IOError.
266 # to a directory. Let the posixfile() call below raise IOError.
261 if basename:
267 if basename:
262 if atomictemp:
268 if atomictemp:
263 util.ensuredirs(dirname, self.createmode)
269 util.ensuredirs(dirname, self.createmode)
264 return util.atomictempfile(f, mode, self.createmode)
270 return util.atomictempfile(f, mode, self.createmode)
265 try:
271 try:
266 if 'w' in mode:
272 if 'w' in mode:
267 util.unlink(f)
273 util.unlink(f)
268 nlink = 0
274 nlink = 0
269 else:
275 else:
270 # nlinks() may behave differently for files on Windows
276 # nlinks() may behave differently for files on Windows
271 # shares if the file is open.
277 # shares if the file is open.
272 fd = util.posixfile(f)
278 fd = util.posixfile(f)
273 nlink = util.nlinks(f)
279 nlink = util.nlinks(f)
274 if nlink < 1:
280 if nlink < 1:
275 nlink = 2 # force mktempcopy (issue1922)
281 nlink = 2 # force mktempcopy (issue1922)
276 fd.close()
282 fd.close()
277 except (OSError, IOError), e:
283 except (OSError, IOError), e:
278 if e.errno != errno.ENOENT:
284 if e.errno != errno.ENOENT:
279 raise
285 raise
280 nlink = 0
286 nlink = 0
281 util.ensuredirs(dirname, self.createmode)
287 util.ensuredirs(dirname, self.createmode)
282 if nlink > 0:
288 if nlink > 0:
283 if self._trustnlink is None:
289 if self._trustnlink is None:
284 self._trustnlink = nlink > 1 or util.checknlink(f)
290 self._trustnlink = nlink > 1 or util.checknlink(f)
285 if nlink > 1 or not self._trustnlink:
291 if nlink > 1 or not self._trustnlink:
286 util.rename(util.mktempcopy(f), f)
292 util.rename(util.mktempcopy(f), f)
287 fp = util.posixfile(f, mode)
293 fp = util.posixfile(f, mode)
288 if nlink == 0:
294 if nlink == 0:
289 self._fixfilemode(f)
295 self._fixfilemode(f)
290 return fp
296 return fp
291
297
292 def symlink(self, src, dst):
298 def symlink(self, src, dst):
293 self.audit(dst)
299 self.audit(dst)
294 linkname = self.join(dst)
300 linkname = self.join(dst)
295 try:
301 try:
296 os.unlink(linkname)
302 os.unlink(linkname)
297 except OSError:
303 except OSError:
298 pass
304 pass
299
305
300 util.ensuredirs(os.path.dirname(linkname), self.createmode)
306 util.ensuredirs(os.path.dirname(linkname), self.createmode)
301
307
302 if self._cansymlink:
308 if self._cansymlink:
303 try:
309 try:
304 os.symlink(src, linkname)
310 os.symlink(src, linkname)
305 except OSError, err:
311 except OSError, err:
306 raise OSError(err.errno, _('could not symlink to %r: %s') %
312 raise OSError(err.errno, _('could not symlink to %r: %s') %
307 (src, err.strerror), linkname)
313 (src, err.strerror), linkname)
308 else:
314 else:
309 self.write(dst, src)
315 self.write(dst, src)
310
316
311 def join(self, path):
317 def join(self, path):
312 if path:
318 if path:
313 return os.path.join(self.base, path)
319 return os.path.join(self.base, path)
314 else:
320 else:
315 return self.base
321 return self.base
316
322
317 opener = vfs
323 opener = vfs
318
324
319 class auditvfs(object):
325 class auditvfs(object):
320 def __init__(self, vfs):
326 def __init__(self, vfs):
321 self.vfs = vfs
327 self.vfs = vfs
322
328
323 def _getmustaudit(self):
329 def _getmustaudit(self):
324 return self.vfs.mustaudit
330 return self.vfs.mustaudit
325
331
326 def _setmustaudit(self, onoff):
332 def _setmustaudit(self, onoff):
327 self.vfs.mustaudit = onoff
333 self.vfs.mustaudit = onoff
328
334
329 mustaudit = property(_getmustaudit, _setmustaudit)
335 mustaudit = property(_getmustaudit, _setmustaudit)
330
336
331 class filtervfs(abstractvfs, auditvfs):
337 class filtervfs(abstractvfs, auditvfs):
332 '''Wrapper vfs for filtering filenames with a function.'''
338 '''Wrapper vfs for filtering filenames with a function.'''
333
339
334 def __init__(self, vfs, filter):
340 def __init__(self, vfs, filter):
335 auditvfs.__init__(self, vfs)
341 auditvfs.__init__(self, vfs)
336 self._filter = filter
342 self._filter = filter
337
343
338 def __call__(self, path, *args, **kwargs):
344 def __call__(self, path, *args, **kwargs):
339 return self.vfs(self._filter(path), *args, **kwargs)
345 return self.vfs(self._filter(path), *args, **kwargs)
340
346
341 def join(self, path):
347 def join(self, path):
342 if path:
348 if path:
343 return self.vfs.join(self._filter(path))
349 return self.vfs.join(self._filter(path))
344 else:
350 else:
345 return self.vfs.join(path)
351 return self.vfs.join(path)
346
352
347 filteropener = filtervfs
353 filteropener = filtervfs
348
354
349 class readonlyvfs(abstractvfs, auditvfs):
355 class readonlyvfs(abstractvfs, auditvfs):
350 '''Wrapper vfs preventing any writing.'''
356 '''Wrapper vfs preventing any writing.'''
351
357
352 def __init__(self, vfs):
358 def __init__(self, vfs):
353 auditvfs.__init__(self, vfs)
359 auditvfs.__init__(self, vfs)
354
360
355 def __call__(self, path, mode='r', *args, **kw):
361 def __call__(self, path, mode='r', *args, **kw):
356 if mode not in ('r', 'rb'):
362 if mode not in ('r', 'rb'):
357 raise util.Abort('this vfs is read only')
363 raise util.Abort('this vfs is read only')
358 return self.vfs(path, mode, *args, **kw)
364 return self.vfs(path, mode, *args, **kw)
359
365
360
366
361 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
367 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
362 '''yield every hg repository under path, always recursively.
368 '''yield every hg repository under path, always recursively.
363 The recurse flag will only control recursion into repo working dirs'''
369 The recurse flag will only control recursion into repo working dirs'''
364 def errhandler(err):
370 def errhandler(err):
365 if err.filename == path:
371 if err.filename == path:
366 raise err
372 raise err
367 samestat = getattr(os.path, 'samestat', None)
373 samestat = getattr(os.path, 'samestat', None)
368 if followsym and samestat is not None:
374 if followsym and samestat is not None:
369 def adddir(dirlst, dirname):
375 def adddir(dirlst, dirname):
370 match = False
376 match = False
371 dirstat = os.stat(dirname)
377 dirstat = os.stat(dirname)
372 for lstdirstat in dirlst:
378 for lstdirstat in dirlst:
373 if samestat(dirstat, lstdirstat):
379 if samestat(dirstat, lstdirstat):
374 match = True
380 match = True
375 break
381 break
376 if not match:
382 if not match:
377 dirlst.append(dirstat)
383 dirlst.append(dirstat)
378 return not match
384 return not match
379 else:
385 else:
380 followsym = False
386 followsym = False
381
387
382 if (seen_dirs is None) and followsym:
388 if (seen_dirs is None) and followsym:
383 seen_dirs = []
389 seen_dirs = []
384 adddir(seen_dirs, path)
390 adddir(seen_dirs, path)
385 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
391 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
386 dirs.sort()
392 dirs.sort()
387 if '.hg' in dirs:
393 if '.hg' in dirs:
388 yield root # found a repository
394 yield root # found a repository
389 qroot = os.path.join(root, '.hg', 'patches')
395 qroot = os.path.join(root, '.hg', 'patches')
390 if os.path.isdir(os.path.join(qroot, '.hg')):
396 if os.path.isdir(os.path.join(qroot, '.hg')):
391 yield qroot # we have a patch queue repo here
397 yield qroot # we have a patch queue repo here
392 if recurse:
398 if recurse:
393 # avoid recursing inside the .hg directory
399 # avoid recursing inside the .hg directory
394 dirs.remove('.hg')
400 dirs.remove('.hg')
395 else:
401 else:
396 dirs[:] = [] # don't descend further
402 dirs[:] = [] # don't descend further
397 elif followsym:
403 elif followsym:
398 newdirs = []
404 newdirs = []
399 for d in dirs:
405 for d in dirs:
400 fname = os.path.join(root, d)
406 fname = os.path.join(root, d)
401 if adddir(seen_dirs, fname):
407 if adddir(seen_dirs, fname):
402 if os.path.islink(fname):
408 if os.path.islink(fname):
403 for hgname in walkrepos(fname, True, seen_dirs):
409 for hgname in walkrepos(fname, True, seen_dirs):
404 yield hgname
410 yield hgname
405 else:
411 else:
406 newdirs.append(d)
412 newdirs.append(d)
407 dirs[:] = newdirs
413 dirs[:] = newdirs
408
414
409 def osrcpath():
415 def osrcpath():
410 '''return default os-specific hgrc search path'''
416 '''return default os-specific hgrc search path'''
411 path = systemrcpath()
417 path = systemrcpath()
412 path.extend(userrcpath())
418 path.extend(userrcpath())
413 path = [os.path.normpath(f) for f in path]
419 path = [os.path.normpath(f) for f in path]
414 return path
420 return path
415
421
416 _rcpath = None
422 _rcpath = None
417
423
418 def rcpath():
424 def rcpath():
419 '''return hgrc search path. if env var HGRCPATH is set, use it.
425 '''return hgrc search path. if env var HGRCPATH is set, use it.
420 for each item in path, if directory, use files ending in .rc,
426 for each item in path, if directory, use files ending in .rc,
421 else use item.
427 else use item.
422 make HGRCPATH empty to only look in .hg/hgrc of current repo.
428 make HGRCPATH empty to only look in .hg/hgrc of current repo.
423 if no HGRCPATH, use default os-specific path.'''
429 if no HGRCPATH, use default os-specific path.'''
424 global _rcpath
430 global _rcpath
425 if _rcpath is None:
431 if _rcpath is None:
426 if 'HGRCPATH' in os.environ:
432 if 'HGRCPATH' in os.environ:
427 _rcpath = []
433 _rcpath = []
428 for p in os.environ['HGRCPATH'].split(os.pathsep):
434 for p in os.environ['HGRCPATH'].split(os.pathsep):
429 if not p:
435 if not p:
430 continue
436 continue
431 p = util.expandpath(p)
437 p = util.expandpath(p)
432 if os.path.isdir(p):
438 if os.path.isdir(p):
433 for f, kind in osutil.listdir(p):
439 for f, kind in osutil.listdir(p):
434 if f.endswith('.rc'):
440 if f.endswith('.rc'):
435 _rcpath.append(os.path.join(p, f))
441 _rcpath.append(os.path.join(p, f))
436 else:
442 else:
437 _rcpath.append(p)
443 _rcpath.append(p)
438 else:
444 else:
439 _rcpath = osrcpath()
445 _rcpath = osrcpath()
440 return _rcpath
446 return _rcpath
441
447
442 def revsingle(repo, revspec, default='.'):
448 def revsingle(repo, revspec, default='.'):
443 if not revspec and revspec != 0:
449 if not revspec and revspec != 0:
444 return repo[default]
450 return repo[default]
445
451
446 l = revrange(repo, [revspec])
452 l = revrange(repo, [revspec])
447 if len(l) < 1:
453 if len(l) < 1:
448 raise util.Abort(_('empty revision set'))
454 raise util.Abort(_('empty revision set'))
449 return repo[l[-1]]
455 return repo[l[-1]]
450
456
451 def revpair(repo, revs):
457 def revpair(repo, revs):
452 if not revs:
458 if not revs:
453 return repo.dirstate.p1(), None
459 return repo.dirstate.p1(), None
454
460
455 l = revrange(repo, revs)
461 l = revrange(repo, revs)
456
462
457 if len(l) == 0:
463 if len(l) == 0:
458 if revs:
464 if revs:
459 raise util.Abort(_('empty revision range'))
465 raise util.Abort(_('empty revision range'))
460 return repo.dirstate.p1(), None
466 return repo.dirstate.p1(), None
461
467
462 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
468 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
463 return repo.lookup(l[0]), None
469 return repo.lookup(l[0]), None
464
470
465 return repo.lookup(l[0]), repo.lookup(l[-1])
471 return repo.lookup(l[0]), repo.lookup(l[-1])
466
472
467 _revrangesep = ':'
473 _revrangesep = ':'
468
474
469 def revrange(repo, revs):
475 def revrange(repo, revs):
470 """Yield revision as strings from a list of revision specifications."""
476 """Yield revision as strings from a list of revision specifications."""
471
477
472 def revfix(repo, val, defval):
478 def revfix(repo, val, defval):
473 if not val and val != 0 and defval is not None:
479 if not val and val != 0 and defval is not None:
474 return defval
480 return defval
475 return repo[val].rev()
481 return repo[val].rev()
476
482
477 seen, l = set(), []
483 seen, l = set(), []
478 for spec in revs:
484 for spec in revs:
479 if l and not seen:
485 if l and not seen:
480 seen = set(l)
486 seen = set(l)
481 # attempt to parse old-style ranges first to deal with
487 # attempt to parse old-style ranges first to deal with
482 # things like old-tag which contain query metacharacters
488 # things like old-tag which contain query metacharacters
483 try:
489 try:
484 if isinstance(spec, int):
490 if isinstance(spec, int):
485 seen.add(spec)
491 seen.add(spec)
486 l.append(spec)
492 l.append(spec)
487 continue
493 continue
488
494
489 if _revrangesep in spec:
495 if _revrangesep in spec:
490 start, end = spec.split(_revrangesep, 1)
496 start, end = spec.split(_revrangesep, 1)
491 start = revfix(repo, start, 0)
497 start = revfix(repo, start, 0)
492 end = revfix(repo, end, len(repo) - 1)
498 end = revfix(repo, end, len(repo) - 1)
493 if end == nullrev and start <= 0:
499 if end == nullrev and start <= 0:
494 start = nullrev
500 start = nullrev
495 rangeiter = repo.changelog.revs(start, end)
501 rangeiter = repo.changelog.revs(start, end)
496 if not seen and not l:
502 if not seen and not l:
497 # by far the most common case: revs = ["-1:0"]
503 # by far the most common case: revs = ["-1:0"]
498 l = list(rangeiter)
504 l = list(rangeiter)
499 # defer syncing seen until next iteration
505 # defer syncing seen until next iteration
500 continue
506 continue
501 newrevs = set(rangeiter)
507 newrevs = set(rangeiter)
502 if seen:
508 if seen:
503 newrevs.difference_update(seen)
509 newrevs.difference_update(seen)
504 seen.update(newrevs)
510 seen.update(newrevs)
505 else:
511 else:
506 seen = newrevs
512 seen = newrevs
507 l.extend(sorted(newrevs, reverse=start > end))
513 l.extend(sorted(newrevs, reverse=start > end))
508 continue
514 continue
509 elif spec and spec in repo: # single unquoted rev
515 elif spec and spec in repo: # single unquoted rev
510 rev = revfix(repo, spec, None)
516 rev = revfix(repo, spec, None)
511 if rev in seen:
517 if rev in seen:
512 continue
518 continue
513 seen.add(rev)
519 seen.add(rev)
514 l.append(rev)
520 l.append(rev)
515 continue
521 continue
516 except error.RepoLookupError:
522 except error.RepoLookupError:
517 pass
523 pass
518
524
519 # fall through to new-style queries if old-style fails
525 # fall through to new-style queries if old-style fails
520 m = revset.match(repo.ui, spec)
526 m = revset.match(repo.ui, spec)
521 dl = [r for r in m(repo, list(repo)) if r not in seen]
527 dl = [r for r in m(repo, list(repo)) if r not in seen]
522 l.extend(dl)
528 l.extend(dl)
523 seen.update(dl)
529 seen.update(dl)
524
530
525 return l
531 return l
526
532
527 def expandpats(pats):
533 def expandpats(pats):
528 if not util.expandglobs:
534 if not util.expandglobs:
529 return list(pats)
535 return list(pats)
530 ret = []
536 ret = []
531 for p in pats:
537 for p in pats:
532 kind, name = matchmod._patsplit(p, None)
538 kind, name = matchmod._patsplit(p, None)
533 if kind is None:
539 if kind is None:
534 try:
540 try:
535 globbed = glob.glob(name)
541 globbed = glob.glob(name)
536 except re.error:
542 except re.error:
537 globbed = [name]
543 globbed = [name]
538 if globbed:
544 if globbed:
539 ret.extend(globbed)
545 ret.extend(globbed)
540 continue
546 continue
541 ret.append(p)
547 ret.append(p)
542 return ret
548 return ret
543
549
544 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
550 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
545 if pats == ("",):
551 if pats == ("",):
546 pats = []
552 pats = []
547 if not globbed and default == 'relpath':
553 if not globbed and default == 'relpath':
548 pats = expandpats(pats or [])
554 pats = expandpats(pats or [])
549
555
550 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
556 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
551 default)
557 default)
552 def badfn(f, msg):
558 def badfn(f, msg):
553 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
559 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
554 m.bad = badfn
560 m.bad = badfn
555 return m, pats
561 return m, pats
556
562
557 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
563 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
558 return matchandpats(ctx, pats, opts, globbed, default)[0]
564 return matchandpats(ctx, pats, opts, globbed, default)[0]
559
565
560 def matchall(repo):
566 def matchall(repo):
561 return matchmod.always(repo.root, repo.getcwd())
567 return matchmod.always(repo.root, repo.getcwd())
562
568
563 def matchfiles(repo, files):
569 def matchfiles(repo, files):
564 return matchmod.exact(repo.root, repo.getcwd(), files)
570 return matchmod.exact(repo.root, repo.getcwd(), files)
565
571
566 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
572 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
567 if dry_run is None:
573 if dry_run is None:
568 dry_run = opts.get('dry_run')
574 dry_run = opts.get('dry_run')
569 if similarity is None:
575 if similarity is None:
570 similarity = float(opts.get('similarity') or 0)
576 similarity = float(opts.get('similarity') or 0)
571 # we'd use status here, except handling of symlinks and ignore is tricky
577 # we'd use status here, except handling of symlinks and ignore is tricky
572 m = match(repo[None], pats, opts)
578 m = match(repo[None], pats, opts)
573 rejected = []
579 rejected = []
574 m.bad = lambda x, y: rejected.append(x)
580 m.bad = lambda x, y: rejected.append(x)
575
581
576 added, unknown, deleted, removed = _interestingfiles(repo, m)
582 added, unknown, deleted, removed = _interestingfiles(repo, m)
577
583
578 unknownset = set(unknown)
584 unknownset = set(unknown)
579 toprint = unknownset.copy()
585 toprint = unknownset.copy()
580 toprint.update(deleted)
586 toprint.update(deleted)
581 for abs in sorted(toprint):
587 for abs in sorted(toprint):
582 if repo.ui.verbose or not m.exact(abs):
588 if repo.ui.verbose or not m.exact(abs):
583 rel = m.rel(abs)
589 rel = m.rel(abs)
584 if abs in unknownset:
590 if abs in unknownset:
585 status = _('adding %s\n') % ((pats and rel) or abs)
591 status = _('adding %s\n') % ((pats and rel) or abs)
586 else:
592 else:
587 status = _('removing %s\n') % ((pats and rel) or abs)
593 status = _('removing %s\n') % ((pats and rel) or abs)
588 repo.ui.status(status)
594 repo.ui.status(status)
589
595
590 renames = _findrenames(repo, m, added + unknown, removed + deleted,
596 renames = _findrenames(repo, m, added + unknown, removed + deleted,
591 similarity)
597 similarity)
592
598
593 if not dry_run:
599 if not dry_run:
594 _markchanges(repo, unknown, deleted, renames)
600 _markchanges(repo, unknown, deleted, renames)
595
601
596 for f in rejected:
602 for f in rejected:
597 if f in m.files():
603 if f in m.files():
598 return 1
604 return 1
599 return 0
605 return 0
600
606
601 def marktouched(repo, files, similarity=0.0):
607 def marktouched(repo, files, similarity=0.0):
602 '''Assert that files have somehow been operated upon. files are relative to
608 '''Assert that files have somehow been operated upon. files are relative to
603 the repo root.'''
609 the repo root.'''
604 m = matchfiles(repo, files)
610 m = matchfiles(repo, files)
605 rejected = []
611 rejected = []
606 m.bad = lambda x, y: rejected.append(x)
612 m.bad = lambda x, y: rejected.append(x)
607
613
608 added, unknown, deleted, removed = _interestingfiles(repo, m)
614 added, unknown, deleted, removed = _interestingfiles(repo, m)
609
615
610 if repo.ui.verbose:
616 if repo.ui.verbose:
611 unknownset = set(unknown)
617 unknownset = set(unknown)
612 toprint = unknownset.copy()
618 toprint = unknownset.copy()
613 toprint.update(deleted)
619 toprint.update(deleted)
614 for abs in sorted(toprint):
620 for abs in sorted(toprint):
615 if abs in unknownset:
621 if abs in unknownset:
616 status = _('adding %s\n') % abs
622 status = _('adding %s\n') % abs
617 else:
623 else:
618 status = _('removing %s\n') % abs
624 status = _('removing %s\n') % abs
619 repo.ui.status(status)
625 repo.ui.status(status)
620
626
621 renames = _findrenames(repo, m, added + unknown, removed + deleted,
627 renames = _findrenames(repo, m, added + unknown, removed + deleted,
622 similarity)
628 similarity)
623
629
624 _markchanges(repo, unknown, deleted, renames)
630 _markchanges(repo, unknown, deleted, renames)
625
631
626 for f in rejected:
632 for f in rejected:
627 if f in m.files():
633 if f in m.files():
628 return 1
634 return 1
629 return 0
635 return 0
630
636
631 def _interestingfiles(repo, matcher):
637 def _interestingfiles(repo, matcher):
632 '''Walk dirstate with matcher, looking for files that addremove would care
638 '''Walk dirstate with matcher, looking for files that addremove would care
633 about.
639 about.
634
640
635 This is different from dirstate.status because it doesn't care about
641 This is different from dirstate.status because it doesn't care about
636 whether files are modified or clean.'''
642 whether files are modified or clean.'''
637 added, unknown, deleted, removed = [], [], [], []
643 added, unknown, deleted, removed = [], [], [], []
638 audit_path = pathutil.pathauditor(repo.root)
644 audit_path = pathutil.pathauditor(repo.root)
639
645
640 ctx = repo[None]
646 ctx = repo[None]
641 dirstate = repo.dirstate
647 dirstate = repo.dirstate
642 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
648 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
643 full=False)
649 full=False)
644 for abs, st in walkresults.iteritems():
650 for abs, st in walkresults.iteritems():
645 dstate = dirstate[abs]
651 dstate = dirstate[abs]
646 if dstate == '?' and audit_path.check(abs):
652 if dstate == '?' and audit_path.check(abs):
647 unknown.append(abs)
653 unknown.append(abs)
648 elif dstate != 'r' and not st:
654 elif dstate != 'r' and not st:
649 deleted.append(abs)
655 deleted.append(abs)
650 # for finding renames
656 # for finding renames
651 elif dstate == 'r':
657 elif dstate == 'r':
652 removed.append(abs)
658 removed.append(abs)
653 elif dstate == 'a':
659 elif dstate == 'a':
654 added.append(abs)
660 added.append(abs)
655
661
656 return added, unknown, deleted, removed
662 return added, unknown, deleted, removed
657
663
658 def _findrenames(repo, matcher, added, removed, similarity):
664 def _findrenames(repo, matcher, added, removed, similarity):
659 '''Find renames from removed files to added ones.'''
665 '''Find renames from removed files to added ones.'''
660 renames = {}
666 renames = {}
661 if similarity > 0:
667 if similarity > 0:
662 for old, new, score in similar.findrenames(repo, added, removed,
668 for old, new, score in similar.findrenames(repo, added, removed,
663 similarity):
669 similarity):
664 if (repo.ui.verbose or not matcher.exact(old)
670 if (repo.ui.verbose or not matcher.exact(old)
665 or not matcher.exact(new)):
671 or not matcher.exact(new)):
666 repo.ui.status(_('recording removal of %s as rename to %s '
672 repo.ui.status(_('recording removal of %s as rename to %s '
667 '(%d%% similar)\n') %
673 '(%d%% similar)\n') %
668 (matcher.rel(old), matcher.rel(new),
674 (matcher.rel(old), matcher.rel(new),
669 score * 100))
675 score * 100))
670 renames[new] = old
676 renames[new] = old
671 return renames
677 return renames
672
678
673 def _markchanges(repo, unknown, deleted, renames):
679 def _markchanges(repo, unknown, deleted, renames):
674 '''Marks the files in unknown as added, the files in deleted as removed,
680 '''Marks the files in unknown as added, the files in deleted as removed,
675 and the files in renames as copied.'''
681 and the files in renames as copied.'''
676 wctx = repo[None]
682 wctx = repo[None]
677 wlock = repo.wlock()
683 wlock = repo.wlock()
678 try:
684 try:
679 wctx.forget(deleted)
685 wctx.forget(deleted)
680 wctx.add(unknown)
686 wctx.add(unknown)
681 for new, old in renames.iteritems():
687 for new, old in renames.iteritems():
682 wctx.copy(old, new)
688 wctx.copy(old, new)
683 finally:
689 finally:
684 wlock.release()
690 wlock.release()
685
691
686 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
692 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
687 """Update the dirstate to reflect the intent of copying src to dst. For
693 """Update the dirstate to reflect the intent of copying src to dst. For
688 different reasons it might not end with dst being marked as copied from src.
694 different reasons it might not end with dst being marked as copied from src.
689 """
695 """
690 origsrc = repo.dirstate.copied(src) or src
696 origsrc = repo.dirstate.copied(src) or src
691 if dst == origsrc: # copying back a copy?
697 if dst == origsrc: # copying back a copy?
692 if repo.dirstate[dst] not in 'mn' and not dryrun:
698 if repo.dirstate[dst] not in 'mn' and not dryrun:
693 repo.dirstate.normallookup(dst)
699 repo.dirstate.normallookup(dst)
694 else:
700 else:
695 if repo.dirstate[origsrc] == 'a' and origsrc == src:
701 if repo.dirstate[origsrc] == 'a' and origsrc == src:
696 if not ui.quiet:
702 if not ui.quiet:
697 ui.warn(_("%s has not been committed yet, so no copy "
703 ui.warn(_("%s has not been committed yet, so no copy "
698 "data will be stored for %s.\n")
704 "data will be stored for %s.\n")
699 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
705 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
700 if repo.dirstate[dst] in '?r' and not dryrun:
706 if repo.dirstate[dst] in '?r' and not dryrun:
701 wctx.add([dst])
707 wctx.add([dst])
702 elif not dryrun:
708 elif not dryrun:
703 wctx.copy(origsrc, dst)
709 wctx.copy(origsrc, dst)
704
710
705 def readrequires(opener, supported):
711 def readrequires(opener, supported):
706 '''Reads and parses .hg/requires and checks if all entries found
712 '''Reads and parses .hg/requires and checks if all entries found
707 are in the list of supported features.'''
713 are in the list of supported features.'''
708 requirements = set(opener.read("requires").splitlines())
714 requirements = set(opener.read("requires").splitlines())
709 missings = []
715 missings = []
710 for r in requirements:
716 for r in requirements:
711 if r not in supported:
717 if r not in supported:
712 if not r or not r[0].isalnum():
718 if not r or not r[0].isalnum():
713 raise error.RequirementError(_(".hg/requires file is corrupt"))
719 raise error.RequirementError(_(".hg/requires file is corrupt"))
714 missings.append(r)
720 missings.append(r)
715 missings.sort()
721 missings.sort()
716 if missings:
722 if missings:
717 raise error.RequirementError(
723 raise error.RequirementError(
718 _("unknown repository format: requires features '%s' (upgrade "
724 _("unknown repository format: requires features '%s' (upgrade "
719 "Mercurial)") % "', '".join(missings))
725 "Mercurial)") % "', '".join(missings))
720 return requirements
726 return requirements
721
727
722 class filecachesubentry(object):
728 class filecachesubentry(object):
723 def __init__(self, path, stat):
729 def __init__(self, path, stat):
724 self.path = path
730 self.path = path
725 self.cachestat = None
731 self.cachestat = None
726 self._cacheable = None
732 self._cacheable = None
727
733
728 if stat:
734 if stat:
729 self.cachestat = filecachesubentry.stat(self.path)
735 self.cachestat = filecachesubentry.stat(self.path)
730
736
731 if self.cachestat:
737 if self.cachestat:
732 self._cacheable = self.cachestat.cacheable()
738 self._cacheable = self.cachestat.cacheable()
733 else:
739 else:
734 # None means we don't know yet
740 # None means we don't know yet
735 self._cacheable = None
741 self._cacheable = None
736
742
737 def refresh(self):
743 def refresh(self):
738 if self.cacheable():
744 if self.cacheable():
739 self.cachestat = filecachesubentry.stat(self.path)
745 self.cachestat = filecachesubentry.stat(self.path)
740
746
741 def cacheable(self):
747 def cacheable(self):
742 if self._cacheable is not None:
748 if self._cacheable is not None:
743 return self._cacheable
749 return self._cacheable
744
750
745 # we don't know yet, assume it is for now
751 # we don't know yet, assume it is for now
746 return True
752 return True
747
753
748 def changed(self):
754 def changed(self):
749 # no point in going further if we can't cache it
755 # no point in going further if we can't cache it
750 if not self.cacheable():
756 if not self.cacheable():
751 return True
757 return True
752
758
753 newstat = filecachesubentry.stat(self.path)
759 newstat = filecachesubentry.stat(self.path)
754
760
755 # we may not know if it's cacheable yet, check again now
761 # we may not know if it's cacheable yet, check again now
756 if newstat and self._cacheable is None:
762 if newstat and self._cacheable is None:
757 self._cacheable = newstat.cacheable()
763 self._cacheable = newstat.cacheable()
758
764
759 # check again
765 # check again
760 if not self._cacheable:
766 if not self._cacheable:
761 return True
767 return True
762
768
763 if self.cachestat != newstat:
769 if self.cachestat != newstat:
764 self.cachestat = newstat
770 self.cachestat = newstat
765 return True
771 return True
766 else:
772 else:
767 return False
773 return False
768
774
769 @staticmethod
775 @staticmethod
770 def stat(path):
776 def stat(path):
771 try:
777 try:
772 return util.cachestat(path)
778 return util.cachestat(path)
773 except OSError, e:
779 except OSError, e:
774 if e.errno != errno.ENOENT:
780 if e.errno != errno.ENOENT:
775 raise
781 raise
776
782
777 class filecacheentry(object):
783 class filecacheentry(object):
778 def __init__(self, paths, stat=True):
784 def __init__(self, paths, stat=True):
779 self._entries = []
785 self._entries = []
780 for path in paths:
786 for path in paths:
781 self._entries.append(filecachesubentry(path, stat))
787 self._entries.append(filecachesubentry(path, stat))
782
788
783 def changed(self):
789 def changed(self):
784 '''true if any entry has changed'''
790 '''true if any entry has changed'''
785 for entry in self._entries:
791 for entry in self._entries:
786 if entry.changed():
792 if entry.changed():
787 return True
793 return True
788 return False
794 return False
789
795
790 def refresh(self):
796 def refresh(self):
791 for entry in self._entries:
797 for entry in self._entries:
792 entry.refresh()
798 entry.refresh()
793
799
794 class filecache(object):
800 class filecache(object):
795 '''A property like decorator that tracks files under .hg/ for updates.
801 '''A property like decorator that tracks files under .hg/ for updates.
796
802
797 Records stat info when called in _filecache.
803 Records stat info when called in _filecache.
798
804
799 On subsequent calls, compares old stat info with new info, and recreates the
805 On subsequent calls, compares old stat info with new info, and recreates the
800 object when any of the files changes, updating the new stat info in
806 object when any of the files changes, updating the new stat info in
801 _filecache.
807 _filecache.
802
808
803 Mercurial either atomic renames or appends for files under .hg,
809 Mercurial either atomic renames or appends for files under .hg,
804 so to ensure the cache is reliable we need the filesystem to be able
810 so to ensure the cache is reliable we need the filesystem to be able
805 to tell us if a file has been replaced. If it can't, we fallback to
811 to tell us if a file has been replaced. If it can't, we fallback to
806 recreating the object on every call (essentially the same behaviour as
812 recreating the object on every call (essentially the same behaviour as
807 propertycache).
813 propertycache).
808
814
809 '''
815 '''
810 def __init__(self, *paths):
816 def __init__(self, *paths):
811 self.paths = paths
817 self.paths = paths
812
818
813 def join(self, obj, fname):
819 def join(self, obj, fname):
814 """Used to compute the runtime path of a cached file.
820 """Used to compute the runtime path of a cached file.
815
821
816 Users should subclass filecache and provide their own version of this
822 Users should subclass filecache and provide their own version of this
817 function to call the appropriate join function on 'obj' (an instance
823 function to call the appropriate join function on 'obj' (an instance
818 of the class that its member function was decorated).
824 of the class that its member function was decorated).
819 """
825 """
820 return obj.join(fname)
826 return obj.join(fname)
821
827
822 def __call__(self, func):
828 def __call__(self, func):
823 self.func = func
829 self.func = func
824 self.name = func.__name__
830 self.name = func.__name__
825 return self
831 return self
826
832
827 def __get__(self, obj, type=None):
833 def __get__(self, obj, type=None):
828 # do we need to check if the file changed?
834 # do we need to check if the file changed?
829 if self.name in obj.__dict__:
835 if self.name in obj.__dict__:
830 assert self.name in obj._filecache, self.name
836 assert self.name in obj._filecache, self.name
831 return obj.__dict__[self.name]
837 return obj.__dict__[self.name]
832
838
833 entry = obj._filecache.get(self.name)
839 entry = obj._filecache.get(self.name)
834
840
835 if entry:
841 if entry:
836 if entry.changed():
842 if entry.changed():
837 entry.obj = self.func(obj)
843 entry.obj = self.func(obj)
838 else:
844 else:
839 paths = [self.join(obj, path) for path in self.paths]
845 paths = [self.join(obj, path) for path in self.paths]
840
846
841 # We stat -before- creating the object so our cache doesn't lie if
847 # We stat -before- creating the object so our cache doesn't lie if
842 # a writer modified between the time we read and stat
848 # a writer modified between the time we read and stat
843 entry = filecacheentry(paths, True)
849 entry = filecacheentry(paths, True)
844 entry.obj = self.func(obj)
850 entry.obj = self.func(obj)
845
851
846 obj._filecache[self.name] = entry
852 obj._filecache[self.name] = entry
847
853
848 obj.__dict__[self.name] = entry.obj
854 obj.__dict__[self.name] = entry.obj
849 return entry.obj
855 return entry.obj
850
856
851 def __set__(self, obj, value):
857 def __set__(self, obj, value):
852 if self.name not in obj._filecache:
858 if self.name not in obj._filecache:
853 # we add an entry for the missing value because X in __dict__
859 # we add an entry for the missing value because X in __dict__
854 # implies X in _filecache
860 # implies X in _filecache
855 paths = [self.join(obj, path) for path in self.paths]
861 paths = [self.join(obj, path) for path in self.paths]
856 ce = filecacheentry(paths, False)
862 ce = filecacheentry(paths, False)
857 obj._filecache[self.name] = ce
863 obj._filecache[self.name] = ce
858 else:
864 else:
859 ce = obj._filecache[self.name]
865 ce = obj._filecache[self.name]
860
866
861 ce.obj = value # update cached copy
867 ce.obj = value # update cached copy
862 obj.__dict__[self.name] = value # update copy returned by obj.x
868 obj.__dict__[self.name] = value # update copy returned by obj.x
863
869
864 def __delete__(self, obj):
870 def __delete__(self, obj):
865 try:
871 try:
866 del obj.__dict__[self.name]
872 del obj.__dict__[self.name]
867 except KeyError:
873 except KeyError:
868 raise AttributeError(self.name)
874 raise AttributeError(self.name)
869
875
870 class dirs(object):
876 class dirs(object):
871 '''a multiset of directory names from a dirstate or manifest'''
877 '''a multiset of directory names from a dirstate or manifest'''
872
878
873 def __init__(self, map, skip=None):
879 def __init__(self, map, skip=None):
874 self._dirs = {}
880 self._dirs = {}
875 addpath = self.addpath
881 addpath = self.addpath
876 if util.safehasattr(map, 'iteritems') and skip is not None:
882 if util.safehasattr(map, 'iteritems') and skip is not None:
877 for f, s in map.iteritems():
883 for f, s in map.iteritems():
878 if s[0] != skip:
884 if s[0] != skip:
879 addpath(f)
885 addpath(f)
880 else:
886 else:
881 for f in map:
887 for f in map:
882 addpath(f)
888 addpath(f)
883
889
884 def addpath(self, path):
890 def addpath(self, path):
885 dirs = self._dirs
891 dirs = self._dirs
886 for base in finddirs(path):
892 for base in finddirs(path):
887 if base in dirs:
893 if base in dirs:
888 dirs[base] += 1
894 dirs[base] += 1
889 return
895 return
890 dirs[base] = 1
896 dirs[base] = 1
891
897
892 def delpath(self, path):
898 def delpath(self, path):
893 dirs = self._dirs
899 dirs = self._dirs
894 for base in finddirs(path):
900 for base in finddirs(path):
895 if dirs[base] > 1:
901 if dirs[base] > 1:
896 dirs[base] -= 1
902 dirs[base] -= 1
897 return
903 return
898 del dirs[base]
904 del dirs[base]
899
905
900 def __iter__(self):
906 def __iter__(self):
901 return self._dirs.iterkeys()
907 return self._dirs.iterkeys()
902
908
903 def __contains__(self, d):
909 def __contains__(self, d):
904 return d in self._dirs
910 return d in self._dirs
905
911
906 if util.safehasattr(parsers, 'dirs'):
912 if util.safehasattr(parsers, 'dirs'):
907 dirs = parsers.dirs
913 dirs = parsers.dirs
908
914
909 def finddirs(path):
915 def finddirs(path):
910 pos = path.rfind('/')
916 pos = path.rfind('/')
911 while pos != -1:
917 while pos != -1:
912 yield path[:pos]
918 yield path[:pos]
913 pos = path.rfind('/', 0, pos)
919 pos = path.rfind('/', 0, pos)