##// END OF EJS Templates
scmutil: changed revrange to return lazysets for new style revsets...
Lucas Moscovicz -
r20551:efae655f default
parent child Browse files
Show More
@@ -1,929 +1,932 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs, sorted by subpath.  Subpaths present
    in both contexts resolve against ctx1; the ctx2-only entries matter
    when the .hgsub file has been modified (in ctx2) but not yet
    committed (in ctx1).
    """
    # Build a subpath -> context mapping, letting ctx1 win on overlap.
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)
32
32
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
53
53
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a purely numeric label would be ambiguous with a revision number
        raise util.Abort(_("cannot use an integer as a name"))
67
67
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # embedded newlines would corrupt the on-disk dirstate/manifest encodings
    if '\r' in f or '\n' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
72
72
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
84
84
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.  Raises ConfigError when
    ui.portablefilenames holds an unrecognized value.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows a non-portable name can never be created, so always abort
    abort = os.name == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
97
97
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds onto a tracked one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case all tracked names in a single encoding.lower() call;
        # joining on NUL is safe since NUL cannot appear in a filename.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # f: filename being added; warns/aborts on a case-fold collision
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
121
121
122 class abstractvfs(object):
122 class abstractvfs(object):
123 """Abstract base class; cannot be instantiated"""
123 """Abstract base class; cannot be instantiated"""
124
124
125 def __init__(self, *args, **kwargs):
125 def __init__(self, *args, **kwargs):
126 '''Prevent instantiation; don't call this from subclasses.'''
126 '''Prevent instantiation; don't call this from subclasses.'''
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
128
128
129 def tryread(self, path):
129 def tryread(self, path):
130 '''gracefully return an empty string for missing files'''
130 '''gracefully return an empty string for missing files'''
131 try:
131 try:
132 return self.read(path)
132 return self.read(path)
133 except IOError, inst:
133 except IOError, inst:
134 if inst.errno != errno.ENOENT:
134 if inst.errno != errno.ENOENT:
135 raise
135 raise
136 return ""
136 return ""
137
137
138 def open(self, path, mode="r", text=False, atomictemp=False):
138 def open(self, path, mode="r", text=False, atomictemp=False):
139 self.open = self.__call__
139 self.open = self.__call__
140 return self.__call__(path, mode, text, atomictemp)
140 return self.__call__(path, mode, text, atomictemp)
141
141
142 def read(self, path):
142 def read(self, path):
143 fp = self(path, 'rb')
143 fp = self(path, 'rb')
144 try:
144 try:
145 return fp.read()
145 return fp.read()
146 finally:
146 finally:
147 fp.close()
147 fp.close()
148
148
149 def write(self, path, data):
149 def write(self, path, data):
150 fp = self(path, 'wb')
150 fp = self(path, 'wb')
151 try:
151 try:
152 return fp.write(data)
152 return fp.write(data)
153 finally:
153 finally:
154 fp.close()
154 fp.close()
155
155
156 def append(self, path, data):
156 def append(self, path, data):
157 fp = self(path, 'ab')
157 fp = self(path, 'ab')
158 try:
158 try:
159 return fp.write(data)
159 return fp.write(data)
160 finally:
160 finally:
161 fp.close()
161 fp.close()
162
162
163 def chmod(self, path, mode):
163 def chmod(self, path, mode):
164 return os.chmod(self.join(path), mode)
164 return os.chmod(self.join(path), mode)
165
165
166 def exists(self, path=None):
166 def exists(self, path=None):
167 return os.path.exists(self.join(path))
167 return os.path.exists(self.join(path))
168
168
169 def fstat(self, fp):
169 def fstat(self, fp):
170 return util.fstat(fp)
170 return util.fstat(fp)
171
171
172 def isdir(self, path=None):
172 def isdir(self, path=None):
173 return os.path.isdir(self.join(path))
173 return os.path.isdir(self.join(path))
174
174
175 def isfile(self, path=None):
175 def isfile(self, path=None):
176 return os.path.isfile(self.join(path))
176 return os.path.isfile(self.join(path))
177
177
178 def islink(self, path=None):
178 def islink(self, path=None):
179 return os.path.islink(self.join(path))
179 return os.path.islink(self.join(path))
180
180
181 def lstat(self, path=None):
181 def lstat(self, path=None):
182 return os.lstat(self.join(path))
182 return os.lstat(self.join(path))
183
183
184 def makedir(self, path=None, notindexed=True):
184 def makedir(self, path=None, notindexed=True):
185 return util.makedir(self.join(path), notindexed)
185 return util.makedir(self.join(path), notindexed)
186
186
187 def makedirs(self, path=None, mode=None):
187 def makedirs(self, path=None, mode=None):
188 return util.makedirs(self.join(path), mode)
188 return util.makedirs(self.join(path), mode)
189
189
190 def makelock(self, info, path):
190 def makelock(self, info, path):
191 return util.makelock(info, self.join(path))
191 return util.makelock(info, self.join(path))
192
192
193 def mkdir(self, path=None):
193 def mkdir(self, path=None):
194 return os.mkdir(self.join(path))
194 return os.mkdir(self.join(path))
195
195
196 def readdir(self, path=None, stat=None, skip=None):
196 def readdir(self, path=None, stat=None, skip=None):
197 return osutil.listdir(self.join(path), stat, skip)
197 return osutil.listdir(self.join(path), stat, skip)
198
198
199 def readlock(self, path):
199 def readlock(self, path):
200 return util.readlock(self.join(path))
200 return util.readlock(self.join(path))
201
201
202 def rename(self, src, dst):
202 def rename(self, src, dst):
203 return util.rename(self.join(src), self.join(dst))
203 return util.rename(self.join(src), self.join(dst))
204
204
205 def readlink(self, path):
205 def readlink(self, path):
206 return os.readlink(self.join(path))
206 return os.readlink(self.join(path))
207
207
208 def setflags(self, path, l, x):
208 def setflags(self, path, l, x):
209 return util.setflags(self.join(path), l, x)
209 return util.setflags(self.join(path), l, x)
210
210
211 def stat(self, path=None):
211 def stat(self, path=None):
212 return os.stat(self.join(path))
212 return os.stat(self.join(path))
213
213
214 def unlink(self, path=None):
214 def unlink(self, path=None):
215 return util.unlink(self.join(path))
215 return util.unlink(self.join(path))
216
216
217 def utime(self, path=None, t=None):
217 def utime(self, path=None, t=None):
218 return os.utime(self.join(path), t)
218 return os.utime(self.join(path), t)
219
219
220 class vfs(abstractvfs):
220 class vfs(abstractvfs):
221 '''Operate files relative to a base directory
221 '''Operate files relative to a base directory
222
222
223 This class is used to hide the details of COW semantics and
223 This class is used to hide the details of COW semantics and
224 remote file access from higher level code.
224 remote file access from higher level code.
225 '''
225 '''
226 def __init__(self, base, audit=True, expandpath=False, realpath=False):
226 def __init__(self, base, audit=True, expandpath=False, realpath=False):
227 if expandpath:
227 if expandpath:
228 base = util.expandpath(base)
228 base = util.expandpath(base)
229 if realpath:
229 if realpath:
230 base = os.path.realpath(base)
230 base = os.path.realpath(base)
231 self.base = base
231 self.base = base
232 self._setmustaudit(audit)
232 self._setmustaudit(audit)
233 self.createmode = None
233 self.createmode = None
234 self._trustnlink = None
234 self._trustnlink = None
235
235
236 def _getmustaudit(self):
236 def _getmustaudit(self):
237 return self._audit
237 return self._audit
238
238
239 def _setmustaudit(self, onoff):
239 def _setmustaudit(self, onoff):
240 self._audit = onoff
240 self._audit = onoff
241 if onoff:
241 if onoff:
242 self.audit = pathutil.pathauditor(self.base)
242 self.audit = pathutil.pathauditor(self.base)
243 else:
243 else:
244 self.audit = util.always
244 self.audit = util.always
245
245
246 mustaudit = property(_getmustaudit, _setmustaudit)
246 mustaudit = property(_getmustaudit, _setmustaudit)
247
247
248 @util.propertycache
248 @util.propertycache
249 def _cansymlink(self):
249 def _cansymlink(self):
250 return util.checklink(self.base)
250 return util.checklink(self.base)
251
251
252 @util.propertycache
252 @util.propertycache
253 def _chmod(self):
253 def _chmod(self):
254 return util.checkexec(self.base)
254 return util.checkexec(self.base)
255
255
256 def _fixfilemode(self, name):
256 def _fixfilemode(self, name):
257 if self.createmode is None or not self._chmod:
257 if self.createmode is None or not self._chmod:
258 return
258 return
259 os.chmod(name, self.createmode & 0666)
259 os.chmod(name, self.createmode & 0666)
260
260
261 def __call__(self, path, mode="r", text=False, atomictemp=False):
261 def __call__(self, path, mode="r", text=False, atomictemp=False):
262 if self._audit:
262 if self._audit:
263 r = util.checkosfilename(path)
263 r = util.checkosfilename(path)
264 if r:
264 if r:
265 raise util.Abort("%s: %r" % (r, path))
265 raise util.Abort("%s: %r" % (r, path))
266 self.audit(path)
266 self.audit(path)
267 f = self.join(path)
267 f = self.join(path)
268
268
269 if not text and "b" not in mode:
269 if not text and "b" not in mode:
270 mode += "b" # for that other OS
270 mode += "b" # for that other OS
271
271
272 nlink = -1
272 nlink = -1
273 if mode not in ('r', 'rb'):
273 if mode not in ('r', 'rb'):
274 dirname, basename = util.split(f)
274 dirname, basename = util.split(f)
275 # If basename is empty, then the path is malformed because it points
275 # If basename is empty, then the path is malformed because it points
276 # to a directory. Let the posixfile() call below raise IOError.
276 # to a directory. Let the posixfile() call below raise IOError.
277 if basename:
277 if basename:
278 if atomictemp:
278 if atomictemp:
279 util.ensuredirs(dirname, self.createmode)
279 util.ensuredirs(dirname, self.createmode)
280 return util.atomictempfile(f, mode, self.createmode)
280 return util.atomictempfile(f, mode, self.createmode)
281 try:
281 try:
282 if 'w' in mode:
282 if 'w' in mode:
283 util.unlink(f)
283 util.unlink(f)
284 nlink = 0
284 nlink = 0
285 else:
285 else:
286 # nlinks() may behave differently for files on Windows
286 # nlinks() may behave differently for files on Windows
287 # shares if the file is open.
287 # shares if the file is open.
288 fd = util.posixfile(f)
288 fd = util.posixfile(f)
289 nlink = util.nlinks(f)
289 nlink = util.nlinks(f)
290 if nlink < 1:
290 if nlink < 1:
291 nlink = 2 # force mktempcopy (issue1922)
291 nlink = 2 # force mktempcopy (issue1922)
292 fd.close()
292 fd.close()
293 except (OSError, IOError), e:
293 except (OSError, IOError), e:
294 if e.errno != errno.ENOENT:
294 if e.errno != errno.ENOENT:
295 raise
295 raise
296 nlink = 0
296 nlink = 0
297 util.ensuredirs(dirname, self.createmode)
297 util.ensuredirs(dirname, self.createmode)
298 if nlink > 0:
298 if nlink > 0:
299 if self._trustnlink is None:
299 if self._trustnlink is None:
300 self._trustnlink = nlink > 1 or util.checknlink(f)
300 self._trustnlink = nlink > 1 or util.checknlink(f)
301 if nlink > 1 or not self._trustnlink:
301 if nlink > 1 or not self._trustnlink:
302 util.rename(util.mktempcopy(f), f)
302 util.rename(util.mktempcopy(f), f)
303 fp = util.posixfile(f, mode)
303 fp = util.posixfile(f, mode)
304 if nlink == 0:
304 if nlink == 0:
305 self._fixfilemode(f)
305 self._fixfilemode(f)
306 return fp
306 return fp
307
307
308 def symlink(self, src, dst):
308 def symlink(self, src, dst):
309 self.audit(dst)
309 self.audit(dst)
310 linkname = self.join(dst)
310 linkname = self.join(dst)
311 try:
311 try:
312 os.unlink(linkname)
312 os.unlink(linkname)
313 except OSError:
313 except OSError:
314 pass
314 pass
315
315
316 util.ensuredirs(os.path.dirname(linkname), self.createmode)
316 util.ensuredirs(os.path.dirname(linkname), self.createmode)
317
317
318 if self._cansymlink:
318 if self._cansymlink:
319 try:
319 try:
320 os.symlink(src, linkname)
320 os.symlink(src, linkname)
321 except OSError, err:
321 except OSError, err:
322 raise OSError(err.errno, _('could not symlink to %r: %s') %
322 raise OSError(err.errno, _('could not symlink to %r: %s') %
323 (src, err.strerror), linkname)
323 (src, err.strerror), linkname)
324 else:
324 else:
325 self.write(dst, src)
325 self.write(dst, src)
326
326
327 def join(self, path):
327 def join(self, path):
328 if path:
328 if path:
329 return os.path.join(self.base, path)
329 return os.path.join(self.base, path)
330 else:
330 else:
331 return self.base
331 return self.base
332
332
333 opener = vfs
333 opener = vfs
334
334
class auditvfs(object):
    """Mixin that forwards the mustaudit flag to a wrapped vfs."""

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
346
346
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        if path:
            return self.vfs.join(self._filter(path))
        # an empty/None path is forwarded unfiltered, mirroring vfs.join
        return self.vfs.join(path)

filteropener = filtervfs
364
364
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # reject any mode other than plain read
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
375
375
376
376
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root path itself are fatal
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (recording dirname's stat) when dirname has not
            # been visited yet; samestat comparison breaks symlink cycles
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect cycles, so never follow links
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
424
424
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = systemrcpath()
    path.extend(userrcpath())
    # normalize each entry so comparisons and display are consistent
    return [os.path.normpath(f) for f in path]
431
431
# module-level cache for rcpath(); computed once per process
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
457
457
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx, falling back to default.

    An explicit revision 0 is honored even though it is falsy.
    """
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if len(revs) < 1:
        raise util.Abort(_('empty revision set'))
    # a multi-revision spec resolves to its last member
    return repo[revs[-1]]
466
466
def revpair(repo, revs):
    """Resolve revs to a (node, node-or-None) pair for diff-style commands."""
    if not revs:
        return repo.dirstate.p1(), None

    parsed = revrange(repo, revs)

    if len(parsed) == 0:
        # NOTE(review): revs is always truthy here (guarded above), so the
        # Abort branch is the only one ever taken in this block
        if revs:
            raise util.Abort(_('empty revision range'))
        return repo.dirstate.p1(), None

    # a single rev given without a range separator means "that rev only"
    if len(parsed) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(parsed[0]), None

    return repo.lookup(parsed[0]), repo.lookup(parsed[-1])
482
482
# separator for old-style revision ranges, e.g. "1:5"
_revrangesep = ':'

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # map an empty spec half to its default; 0 is a valid revision
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, l = set(), []
    for spec in revs:
        # sync the dedup set lazily: the common single-range case never
        # needs it (see the "defer syncing" comment below)
        if l and not seen:
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + [spec]
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start <= 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = list(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                l = l + sorted(newrevs, reverse=start > end)
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l.append(rev)
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec)
        if seen or l:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            l = l + dl
            seen.update(dl)
        else:
            # single new-style spec: return the revset's lazy result as-is
            l = m(repo, revset.spanset(repo))

    return l
542
545
def expandpats(pats):
    """Expand bare glob patterns (for platforms whose shell does not)."""
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, name = matchmod._patsplit(kindpat, None)
        if kind is None:
            # no explicit kind prefix: try treating it as a filesystem glob
            try:
                globbed = glob.glob(name)
            except re.error:
                globbed = [name]
            if globbed:
                ret.extend(globbed)
                continue
        # explicit kind, or a glob matching nothing: keep as-is
        ret.append(kindpat)
    return ret
559
562
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    """Return a (matcher, expanded-pats) pair for pats/opts against ctx.

    The default arguments are never mutated, only rebound, so the shared
    mutable defaults are safe here.
    """
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default)
    def badfn(f, msg):
        # route "bad file" reports through the repo ui with a relative path
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    return m, pats
574 return m, pats
572
575
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Convenience wrapper around matchandpats() that drops the pattern list.'''
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
575
578
def matchall(repo):
    '''Return a matcher that accepts every file in the repository.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
578
581
def matchfiles(repo, files):
    '''Return a matcher that matches exactly the given list of files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files)
581
584
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Add new files, forget missing ones, and record likely renames.

    dry_run and similarity default to the corresponding values in opts.
    Returns 1 if any file explicitly named by the patterns was rejected
    by the matcher, 0 otherwise.
    '''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    rejected = []
    # collect rejected files silently; they are reported via the return code
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    # announce what will be added (unknown files) and removed (deleted
    # files); exact matches are only shown in verbose mode
    unknownset = set(unknown)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            rel = m.rel(abs)
            if abs in unknownset:
                status = _('adding %s\n') % ((pats and rel) or abs)
            else:
                status = _('removing %s\n') % ((pats and rel) or abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown, deleted, renames)

    # signal failure if an explicitly requested file could not be handled
    for f in rejected:
        if f in m.files():
            return 1
    return 0
616
619
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Like addremove(), but takes an explicit file list instead of patterns
    and only prints per-file messages in verbose mode.  Returns 1 if any
    of the given files was rejected by the matcher, 0 otherwise.
    '''
    m = matchfiles(repo, files)
    rejected = []
    # collect rejected files silently; they are reported via the return code
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # unlike addremove(), there is no dry-run mode here
    _markchanges(repo, unknown, deleted, renames)

    # signal failure if an explicitly requested file could not be handled
    for f in rejected:
        if f in m.files():
            return 1
    return 0
646
649
def _interestingfiles(repo, matcher):
    '''Walk the dirstate with matcher and classify files for addremove.

    Returns four lists: (added, unknown, deleted, removed).  Unlike
    dirstate.status, this does not care whether files are modified or
    clean.
    '''
    added = []
    unknown = []
    deleted = []
    removed = []
    audit = pathutil.pathauditor(repo.root)

    wctx = repo[None]
    ds = repo.dirstate
    results = ds.walk(matcher, sorted(wctx.substate), True, False,
                      full=False)
    for path, st in results.iteritems():
        state = ds[path]
        if state == '?' and audit.check(path):
            unknown.append(path)
        elif state != 'r' and not st:
            deleted.append(path)
        elif state == 'r':
            # removed entries are kept around for rename detection
            removed.append(path)
        elif state == 'a':
            added.append(path)

    return added, unknown, deleted, removed
673
676
def _findrenames(repo, matcher, added, removed, similarity):
    '''Map each added file to the removed file it was likely renamed from.'''
    renames = {}
    if similarity <= 0:
        return renames
    candidates = similar.findrenames(repo, added, removed, similarity)
    for old, new, score in candidates:
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
688
691
def _markchanges(repo, unknown, deleted, renames):
    '''Record the classification in the dirstate: unknown files become
    added, deleted files are forgotten, and renames are marked as copies.'''
    ctx = repo[None]
    lock = repo.wlock()
    try:
        ctx.forget(deleted)
        ctx.add(unknown)
        for dst, src in renames.iteritems():
            ctx.copy(src, dst)
    finally:
        lock.release()
701
704
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chains collapse to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is neither merged nor normal: drop the copy/remove intent
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source is an uncommitted add: no copy data can be stored,
            # the best we can do is add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
720
723
def readrequires(opener, supported):
    '''Read and parse .hg/requires via opener and validate its entries.

    Returns the set of requirements found.  Raises error.RequirementError
    when the file is corrupt (an entry not starting with an alphanumeric
    character) or when it names features not in supported.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for feature in requirements:
        if feature in supported:
            continue
        # a well-formed requirement starts with an alphanumeric character
        if not feature or not feature[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(feature)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("unknown repository format: requires features '%s' (upgrade "
              "Mercurial)") % "', '".join(missings))
    return requirements
737
740
class filecachesubentry(object):
    '''Stat-based change tracker for a single file path.

    Used by filecache to decide whether a cached object derived from the
    file must be rebuilt.  cachestat holds the last recorded stat info
    (None until a successful stat); _cacheable is a tri-state flag:
    True/False once known, None while undetermined.
    '''
    def __init__(self, path, stat):
        # path: the file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only if caching can work at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the file changed since the last stat (or we cannot
        tell); updates the recorded stat info as a side effect.'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat(path), or None when the file does not
        # exist; any other stat error propagates
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
792
795
class filecacheentry(object):
    '''Aggregate change tracker over several paths, one
    filecachesubentry per path.'''

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
809
812
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths: file names whose changes invalidate the cached value
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # underlying file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # also store the value on the instance so later reads bypass the
        # descriptor until the cache is explicitly invalidated
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the instance copy; _filecache keeps its entry
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
885
888
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # map may be a dirstate-like mapping (filtered by skip state) or a
        # plain iterable of file names
        self._dirs = {}
        if util.safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    self.addpath(f)
        else:
            for f in map:
                self.addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for d in finddirs(path):
            if d not in counts:
                counts[d] = 1
            else:
                # ancestors of an already-counted dir are counted too;
                # bumping the first hit is enough
                counts[d] += 1
                return

    def delpath(self, path):
        counts = self._dirs
        for d in finddirs(path):
            if counts[d] > 1:
                # shared with other files: just decrement and stop
                counts[d] -= 1
                return
            del counts[d]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
921
924
# prefer the C implementation of dirs when the parsers extension
# module provides one
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
924
927
def finddirs(path):
    '''Yield each ancestor directory of a slash-separated path, deepest
    first: "a/b/c" yields "a/b" then "a"; a slashless path yields nothing.'''
    idx = path.rfind('/')
    while idx >= 0:
        yield path[:idx]
        idx = path.rfind('/', 0, idx)
General Comments 0
You need to be logged in to leave comments. Login now