##// END OF EJS Templates
revpair: drop useless conditional...
Pierre-Yves David -
r20819:202291a2 default
parent child Browse files
Show More
@@ -1,934 +1,932
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
# Resolved once at import time: scmplatform is the Windows or POSIX
# implementation module selected above, and these two callables return
# the system-wide and per-user hgrc search paths respectively.
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
22
22
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> context mapping.  ctx2's entries are seeded
    # first so that ctx1's overwrite them: the .hgsub file may have been
    # modified in ctx2 but not yet committed in ctx1, and ctx1 wins.
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath in sorted(subpaths):
        yield subpath, subpaths[subpath].sub(subpath)
32
32
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
53
53
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    # A purely numeric name would be ambiguous with a revision number.
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise util.Abort(_("cannot use an integer as a name"))
67
67
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # Newlines and carriage returns would corrupt dirstate/manifest
    # storage, which is line-oriented.
    if '\n' in f or '\r' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
72
72
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
84
84
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows can't create non-portable names at all, so always abort there.
    abort = lval == 'abort' or os.name == 'nt'
    warn = bool(bval) or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
97
97
class casecollisionauditor(object):
    '''Warn about (or abort on) a new filename that differs only in case
    from an already-tracked file, which would collide on
    case-insensitive filesystems.'''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked name with a single encoding.lower()
        # call by joining on '\0' (never valid inside a filename) and
        # splitting back afterwards.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        # Check filename f; warn or raise util.Abort on a collision.
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # A name already in the dirstate is not a collision with itself.
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        # Record f so later additions collide against it too.
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
121
121
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # Only a missing file is tolerated; re-raise anything else
            # (permission errors etc.).
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def open(self, path, mode="r", text=False, atomictemp=False):
        # On first use, shadow this method with __call__ on the instance
        # so subsequent opens skip the extra indirection.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp)

    def read(self, path):
        # Read the whole file in binary mode, always closing it.
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def write(self, path, data):
        # Overwrite the file with data (binary mode).
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def append(self, path, data):
        # Append data to the end of the file (binary mode).
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    # The methods below are thin wrappers that resolve a vfs-relative
    # path via self.join() (provided by subclasses) and delegate to the
    # corresponding os/util/osutil function.

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
219
219
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # expandpath resolves '~' and environment variables in base;
        # realpath additionally canonicalizes symlinks.
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        # Lazily determined in __call__: whether nlink counts from this
        # filesystem can be trusted (None = not yet known).
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        # Swap the path auditor in or out; util.always accepts anything.
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # True if the filesystem under base supports symlinks.
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # True if the filesystem under base honors the exec bit.
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # Apply createmode (minus any setuid/setgid-like bits) to a
        # newly created file, when both are applicable.
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        # Open path (relative to base), transparently handling
        # copy-on-write for hardlinked files and atomic replacement.
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
        self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                    util.ensuredirs(dirname, self.createmode)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # Break the hardlink by writing through a temp copy.
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # Brand-new file: apply the configured creation mode.
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        # Create a symlink at dst pointing at src, falling back to a
        # regular file containing src where symlinks are unsupported.
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path):
        # Resolve a vfs-relative path; empty/None means base itself.
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base

# Historical alias kept for callers predating the vfs rename.
opener = vfs
334
334
class auditvfs(object):
    '''Base for wrapper vfs classes; forwards the mustaudit flag to the
    wrapped vfs.'''
    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
346
346
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # Rewrite the path through the filter before delegating to the
        # wrapped vfs.
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        # An empty/None path means "the base directory itself" and is
        # passed through unfiltered.
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self._filter(path))

filteropener = filtervfs
364
364
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # Reject any mode other than a plain read before touching the
        # wrapped vfs.
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
375
375
376
376
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only an error on the root path itself is fatal; errors deeper
        # in the walk are silently skipped by os.walk.
        if err.filename == path:
            raise err
    # samestat is unavailable on some platforms; without it we cannot
    # safely detect symlink cycles, so symlink following is disabled.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Return True (and record dirname's stat) if this directory
            # has not been visited before; False on a duplicate.
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Recurse through the symlink ourselves (os.walk
                        # won't), sharing seen_dirs to break cycles.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # Prune already-seen directories from the walk in place.
            dirs[:] = newdirs
424
424
def osrcpath():
    '''return default os-specific hgrc search path'''
    # System-wide paths come first, then the per-user ones.
    path = systemrcpath()
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
431
431
# Cached result of rcpath(); computed once per process.
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            # A directory entry contributes every *.rc file inside it.
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
457
457
def revsingle(repo, revspec, default='.'):
    # An empty spec falls back to the default revision; the integer 0
    # is a legitimate revision number, not "empty".
    if not revspec and revspec != 0:
        return repo[default]

    l = revrange(repo, [revspec])
    if not l:
        raise util.Abort(_('empty revision set'))
    # The last revision of the set is the one a single spec names.
    return repo[l[-1]]
466
466
def revpair(repo, revs):
    # No specs at all: pair the working directory's first parent with
    # None (meaning "the working directory itself").
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        raise util.Abort(_('empty revision range'))

    first = repo.lookup(l[0])

    # A single spec without a range separator names one revision only.
    if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
        return first, None

    return first, repo.lookup(l[-1])
482
480
# Separator for old-style "start:end" revision ranges.
_revrangesep = ':'

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # Map an empty spec half (e.g. ":5" or "5:") to defval; the
        # integer 0 is a real revision, not "empty".
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    # l accumulates the result; seen tracks already-emitted revisions so
    # later specs don't duplicate them.
    seen, l = set(), revset.baseset([])
    for spec in revs:
        # Sync seen with l here rather than on every append (see the
        # "defer syncing" fast path below).
        if l and not seen:
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # Preserve the range's direction when appending.
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo, revset.spanset(repo))

    return l
545
543
def expandpats(pats):
    '''Expand bare glob patterns the way a POSIX shell would.'''
    # When the platform shell already expands globs, patterns arrive
    # pre-expanded and are passed through untouched.
    if not util.expandglobs:
        return list(pats)
    ret = []
    for p in pats:
        kind, name = matchmod._patsplit(p, None)
        if kind is not None:
            # Explicitly kinded patterns (glob:, re:, ...) are kept as-is.
            ret.append(p)
            continue
        try:
            globbed = glob.glob(name)
        except re.error:
            globbed = [name]
        if globbed:
            ret.extend(globbed)
        else:
            # No match on disk: keep the original pattern untouched.
            ret.append(p)
    return ret
562
560
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Build a matcher for ctx from pats and opts.

    Returns the matcher together with the pattern list actually used
    (after glob expansion).  The matcher's bad-file callback warns on the
    repo ui instead of aborting.
    '''
    if pats == ("",):
        pats = []
    if default == 'relpath' and not globbed:
        pats = expandpats(pats or [])

    def badfn(f, msg):
        # report unwalkable files relative to the cwd, do not abort
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default)
    m.bad = badfn
    return m, pats
575
573
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return only the matcher half of matchandpats().'''
    matcher, _expanded = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
578
576
def matchall(repo):
    '''Return a matcher accepting every file in the repository.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
581
579
def matchfiles(repo, files):
    '''Return a matcher that matches exactly the given file list.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
584
582
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Add new files and remove missing files matched by pats, recording
    probable renames (the work behind "hg addremove").

    dry_run and similarity fall back to opts['dry_run'] and
    opts['similarity'] when not given explicitly.  Returns 1 if any
    explicitly matched file was rejected by the walk, 0 otherwise.
    '''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    # collect files the walk could not handle; checked at the end
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    # report what is about to be added (unknown) or removed (deleted)
    unknownset = set(unknown)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            # show relative paths when patterns were given, absolute otherwise
            rel = m.rel(abs)
            if abs in unknownset:
                status = _('adding %s\n') % ((pats and rel) or abs)
            else:
                status = _('removing %s\n') % ((pats and rel) or abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown, deleted, renames)

    # signal failure only for files the user named explicitly
    for f in rejected:
        if f in m.files():
            return 1
    return 0
619
617
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Classifies the files, reports them in verbose mode, records probable
    renames and marks adds/removes/copies in the dirstate.  Returns 1 if
    any of the given files was rejected by the walk, 0 otherwise.
    '''
    m = matchfiles(repo, files)
    # collect files the walk could not handle; checked at the end
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # unlike addremove(), changes are always applied (no dry-run mode)
    _markchanges(repo, unknown, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
649
647
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    Returns (added, unknown, deleted, removed) lists.  This is different
    from dirstate.status because it doesn't care about whether files are
    modified or clean.
    '''
    added, unknown, deleted, removed = [], [], [], []
    audit = pathutil.pathauditor(repo.root)

    wctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(wctx.substate), True, False,
                                full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        if state == '?' and audit.check(fname):
            unknown.append(fname)
        elif state != 'r' and not st:
            # tracked (or audit-rejected) but gone from disk
            deleted.append(fname)
        elif state == 'r':
            # for finding renames
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed
676
674
def _findrenames(repo, matcher, added, removed, similarity):
    '''Map added files to the removed files they were likely renamed
    from, as a {new: old} dict.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        bothexact = matcher.exact(old) and matcher.exact(new)
        if repo.ui.verbose or not bothexact:
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
691
689
def _markchanges(repo, unknown, deleted, renames):
    '''Mark the files in unknown as added, the files in deleted as
    removed, and the {new: old} renames mapping as copies, all under the
    repo working lock.'''
    wlock = repo.wlock()
    try:
        wctx = repo[None]
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
704
702
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy record so chains collapse to the real origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source was only just added: there is no committed copy data
            # to record; warn and simply add the destination if needed
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
723
721
def readrequires(opener, supported):
    '''Read and parse .hg/requires through opener and return the set of
    requirement strings found there.

    Raises error.RequirementError when the file is corrupt or names a
    feature not present in supported.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("unknown repository format: requires features '%s' (upgrade "
              "Mercurial)") % "', '".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for details"))
    return requirements
742
740
class filecachesubentry(object):
    '''Tracks the stat state of one file path on behalf of filecache.'''
    def __init__(self, path, stat):
        # path: the file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only if the file is cacheable at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the file changed since the last recorded stat (or if
        we cannot tell reliably).'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns None when the path does not exist; other OS errors
        # propagate to the caller
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
797
795
class filecacheentry(object):
    '''Aggregate of filecachesubentry objects, one per tracked path.'''
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, like the
        # equivalent explicit loop
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
814
812
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths are relative names resolved per-object via join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: cached in the instance dict; the invariant is
            # that X in __dict__ implies X in _filecache
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # deleting the attribute drops the instance-dict cache only; the
        # _filecache entry is kept so stat tracking continues
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
890
888
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        add = self.addpath
        if util.safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: skip entries in the given state
            for f, s in map.iteritems():
                if s[0] != skip:
                    add(f)
        else:
            for f in map:
                add(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            known = counts.get(base)
            if known is not None:
                # all shorter ancestors are already counted
                counts[base] = known + 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            remaining = counts[base] - 1
            if remaining:
                counts[base] = remaining
                return
            # count dropped to zero: remove and keep walking ancestors
            del counts[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
926
924
# prefer the C implementation of the dirs multiset when the parsers
# extension module provides one; the pure-Python class above is the
# fallback
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
929
927
def finddirs(path):
    '''Generate every ancestor directory of a '/'-separated path,
    deepest first; yields nothing for a bare filename.'''
    remainder = path
    while True:
        prefix, sep, _name = remainder.rpartition('/')
        if not sep:
            return
        yield prefix
        remainder = prefix
General Comments 0
You need to be logged in to leave comments. Login now