vfs: add "mkstemp()"
FUJIWARA Katsunori
r20980:6fb4c94a default
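
This changeset adds a "mkstemp()" helper to abstractvfs. As the added hunk below shows, it wraps tempfile.mkstemp(), anchors the temporary file under the vfs base (joining an optional relative "dir"), and returns the OS-level file descriptor together with a vfs-relative name, so callers can reopen or rename the file through the same vfs. The following is a minimal usage sketch, not part of the changeset: it assumes a Mercurial checkout that includes this revision, and the base directory, subdirectory, prefix, and file contents are made up for illustration.

    import os
    from mercurial import scmutil

    # Hypothetical base directory; any writable path works for this sketch.
    opener = scmutil.vfs('/tmp/example-repo', audit=False)
    opener.makedirs('cache')  # tempfile.mkstemp() requires the target dir to exist

    # Create a temporary file under <base>/cache; 'name' comes back
    # relative to the vfs base, e.g. 'cache/tmpXXXXXX'.
    fd, name = opener.mkstemp(prefix='journal.', dir='cache')
    try:
        os.write(fd, 'scratch data\n')
    finally:
        os.close(fd)

    # Because the returned name is vfs-relative, it can be passed back
    # to other vfs operations, e.g. an atomic-style rename into place.
    opener.rename(name, 'cache/journal')

Returning the vfs-relative name rather than the absolute path from tempfile keeps callers from handling paths outside the vfs base, consistent with the other abstractvfs helpers in this file.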
@@ -1,945 +1,954 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 def itersubrepos(ctx1, ctx2):
23 def itersubrepos(ctx1, ctx2):
24 """find subrepos in ctx1 or ctx2"""
24 """find subrepos in ctx1 or ctx2"""
25 # Create a (subpath, ctx) mapping where we prefer subpaths from
25 # Create a (subpath, ctx) mapping where we prefer subpaths from
26 # ctx1. The subpaths from ctx2 are important when the .hgsub file
26 # ctx1. The subpaths from ctx2 are important when the .hgsub file
27 # has been modified (in ctx2) but not yet committed (in ctx1).
27 # has been modified (in ctx2) but not yet committed (in ctx1).
28 subpaths = dict.fromkeys(ctx2.substate, ctx2)
28 subpaths = dict.fromkeys(ctx2.substate, ctx2)
29 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
29 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
30 for subpath, ctx in sorted(subpaths.iteritems()):
30 for subpath, ctx in sorted(subpaths.iteritems()):
31 yield subpath, ctx.sub(subpath)
31 yield subpath, ctx.sub(subpath)
32
32
33 def nochangesfound(ui, repo, excluded=None):
33 def nochangesfound(ui, repo, excluded=None):
34 '''Report no changes for push/pull, excluded is None or a list of
34 '''Report no changes for push/pull, excluded is None or a list of
35 nodes excluded from the push/pull.
35 nodes excluded from the push/pull.
36 '''
36 '''
37 secretlist = []
37 secretlist = []
38 if excluded:
38 if excluded:
39 for n in excluded:
39 for n in excluded:
40 if n not in repo:
40 if n not in repo:
41 # discovery should not have included the filtered revision,
41 # discovery should not have included the filtered revision,
42 # we have to explicitly exclude it until discovery is cleanup.
42 # we have to explicitly exclude it until discovery is cleanup.
43 continue
43 continue
44 ctx = repo[n]
44 ctx = repo[n]
45 if ctx.phase() >= phases.secret and not ctx.extinct():
45 if ctx.phase() >= phases.secret and not ctx.extinct():
46 secretlist.append(n)
46 secretlist.append(n)
47
47
48 if secretlist:
48 if secretlist:
49 ui.status(_("no changes found (ignored %d secret changesets)\n")
49 ui.status(_("no changes found (ignored %d secret changesets)\n")
50 % len(secretlist))
50 % len(secretlist))
51 else:
51 else:
52 ui.status(_("no changes found\n"))
52 ui.status(_("no changes found\n"))
53
53
54 def checknewlabel(repo, lbl, kind):
54 def checknewlabel(repo, lbl, kind):
55 # Do not use the "kind" parameter in ui output.
55 # Do not use the "kind" parameter in ui output.
56 # It makes strings difficult to translate.
56 # It makes strings difficult to translate.
57 if lbl in ['tip', '.', 'null']:
57 if lbl in ['tip', '.', 'null']:
58 raise util.Abort(_("the name '%s' is reserved") % lbl)
58 raise util.Abort(_("the name '%s' is reserved") % lbl)
59 for c in (':', '\0', '\n', '\r'):
59 for c in (':', '\0', '\n', '\r'):
60 if c in lbl:
60 if c in lbl:
61 raise util.Abort(_("%r cannot be used in a name") % c)
61 raise util.Abort(_("%r cannot be used in a name") % c)
62 try:
62 try:
63 int(lbl)
63 int(lbl)
64 raise util.Abort(_("cannot use an integer as a name"))
64 raise util.Abort(_("cannot use an integer as a name"))
65 except ValueError:
65 except ValueError:
66 pass
66 pass
67
67
68 def checkfilename(f):
68 def checkfilename(f):
69 '''Check that the filename f is an acceptable filename for a tracked file'''
69 '''Check that the filename f is an acceptable filename for a tracked file'''
70 if '\r' in f or '\n' in f:
70 if '\r' in f or '\n' in f:
71 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
71 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
72
72
73 def checkportable(ui, f):
73 def checkportable(ui, f):
74 '''Check if filename f is portable and warn or abort depending on config'''
74 '''Check if filename f is portable and warn or abort depending on config'''
75 checkfilename(f)
75 checkfilename(f)
76 abort, warn = checkportabilityalert(ui)
76 abort, warn = checkportabilityalert(ui)
77 if abort or warn:
77 if abort or warn:
78 msg = util.checkwinfilename(f)
78 msg = util.checkwinfilename(f)
79 if msg:
79 if msg:
80 msg = "%s: %r" % (msg, f)
80 msg = "%s: %r" % (msg, f)
81 if abort:
81 if abort:
82 raise util.Abort(msg)
82 raise util.Abort(msg)
83 ui.warn(_("warning: %s\n") % msg)
83 ui.warn(_("warning: %s\n") % msg)
84
84
85 def checkportabilityalert(ui):
85 def checkportabilityalert(ui):
86 '''check if the user's config requests nothing, a warning, or abort for
86 '''check if the user's config requests nothing, a warning, or abort for
87 non-portable filenames'''
87 non-portable filenames'''
88 val = ui.config('ui', 'portablefilenames', 'warn')
88 val = ui.config('ui', 'portablefilenames', 'warn')
89 lval = val.lower()
89 lval = val.lower()
90 bval = util.parsebool(val)
90 bval = util.parsebool(val)
91 abort = os.name == 'nt' or lval == 'abort'
91 abort = os.name == 'nt' or lval == 'abort'
92 warn = bval or lval == 'warn'
92 warn = bval or lval == 'warn'
93 if bval is None and not (warn or abort or lval == 'ignore'):
93 if bval is None and not (warn or abort or lval == 'ignore'):
94 raise error.ConfigError(
94 raise error.ConfigError(
95 _("ui.portablefilenames value is invalid ('%s')") % val)
95 _("ui.portablefilenames value is invalid ('%s')") % val)
96 return abort, warn
96 return abort, warn
97
97
98 class casecollisionauditor(object):
98 class casecollisionauditor(object):
99 def __init__(self, ui, abort, dirstate):
99 def __init__(self, ui, abort, dirstate):
100 self._ui = ui
100 self._ui = ui
101 self._abort = abort
101 self._abort = abort
102 allfiles = '\0'.join(dirstate._map)
102 allfiles = '\0'.join(dirstate._map)
103 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
103 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
104 self._dirstate = dirstate
104 self._dirstate = dirstate
105 # The purpose of _newfiles is so that we don't complain about
105 # The purpose of _newfiles is so that we don't complain about
106 # case collisions if someone were to call this object with the
106 # case collisions if someone were to call this object with the
107 # same filename twice.
107 # same filename twice.
108 self._newfiles = set()
108 self._newfiles = set()
109
109
110 def __call__(self, f):
110 def __call__(self, f):
111 if f in self._newfiles:
111 if f in self._newfiles:
112 return
112 return
113 fl = encoding.lower(f)
113 fl = encoding.lower(f)
114 if fl in self._loweredfiles and f not in self._dirstate:
114 if fl in self._loweredfiles and f not in self._dirstate:
115 msg = _('possible case-folding collision for %s') % f
115 msg = _('possible case-folding collision for %s') % f
116 if self._abort:
116 if self._abort:
117 raise util.Abort(msg)
117 raise util.Abort(msg)
118 self._ui.warn(_("warning: %s\n") % msg)
118 self._ui.warn(_("warning: %s\n") % msg)
119 self._loweredfiles.add(fl)
119 self._loweredfiles.add(fl)
120 self._newfiles.add(f)
120 self._newfiles.add(f)
121
121
122 class abstractvfs(object):
122 class abstractvfs(object):
123 """Abstract base class; cannot be instantiated"""
123 """Abstract base class; cannot be instantiated"""
124
124
125 def __init__(self, *args, **kwargs):
125 def __init__(self, *args, **kwargs):
126 '''Prevent instantiation; don't call this from subclasses.'''
126 '''Prevent instantiation; don't call this from subclasses.'''
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
128
128
129 def tryread(self, path):
129 def tryread(self, path):
130 '''gracefully return an empty string for missing files'''
130 '''gracefully return an empty string for missing files'''
131 try:
131 try:
132 return self.read(path)
132 return self.read(path)
133 except IOError, inst:
133 except IOError, inst:
134 if inst.errno != errno.ENOENT:
134 if inst.errno != errno.ENOENT:
135 raise
135 raise
136 return ""
136 return ""
137
137
138 def open(self, path, mode="r", text=False, atomictemp=False):
138 def open(self, path, mode="r", text=False, atomictemp=False):
139 self.open = self.__call__
139 self.open = self.__call__
140 return self.__call__(path, mode, text, atomictemp)
140 return self.__call__(path, mode, text, atomictemp)
141
141
142 def read(self, path):
142 def read(self, path):
143 fp = self(path, 'rb')
143 fp = self(path, 'rb')
144 try:
144 try:
145 return fp.read()
145 return fp.read()
146 finally:
146 finally:
147 fp.close()
147 fp.close()
148
148
149 def write(self, path, data):
149 def write(self, path, data):
150 fp = self(path, 'wb')
150 fp = self(path, 'wb')
151 try:
151 try:
152 return fp.write(data)
152 return fp.write(data)
153 finally:
153 finally:
154 fp.close()
154 fp.close()
155
155
156 def append(self, path, data):
156 def append(self, path, data):
157 fp = self(path, 'ab')
157 fp = self(path, 'ab')
158 try:
158 try:
159 return fp.write(data)
159 return fp.write(data)
160 finally:
160 finally:
161 fp.close()
161 fp.close()
162
162
163 def chmod(self, path, mode):
163 def chmod(self, path, mode):
164 return os.chmod(self.join(path), mode)
164 return os.chmod(self.join(path), mode)
165
165
166 def exists(self, path=None):
166 def exists(self, path=None):
167 return os.path.exists(self.join(path))
167 return os.path.exists(self.join(path))
168
168
169 def fstat(self, fp):
169 def fstat(self, fp):
170 return util.fstat(fp)
170 return util.fstat(fp)
171
171
172 def isdir(self, path=None):
172 def isdir(self, path=None):
173 return os.path.isdir(self.join(path))
173 return os.path.isdir(self.join(path))
174
174
175 def isfile(self, path=None):
175 def isfile(self, path=None):
176 return os.path.isfile(self.join(path))
176 return os.path.isfile(self.join(path))
177
177
178 def islink(self, path=None):
178 def islink(self, path=None):
179 return os.path.islink(self.join(path))
179 return os.path.islink(self.join(path))
180
180
181 def lstat(self, path=None):
181 def lstat(self, path=None):
182 return os.lstat(self.join(path))
182 return os.lstat(self.join(path))
183
183
184 def makedir(self, path=None, notindexed=True):
184 def makedir(self, path=None, notindexed=True):
185 return util.makedir(self.join(path), notindexed)
185 return util.makedir(self.join(path), notindexed)
186
186
187 def makedirs(self, path=None, mode=None):
187 def makedirs(self, path=None, mode=None):
188 return util.makedirs(self.join(path), mode)
188 return util.makedirs(self.join(path), mode)
189
189
190 def makelock(self, info, path):
190 def makelock(self, info, path):
191 return util.makelock(info, self.join(path))
191 return util.makelock(info, self.join(path))
192
192
193 def mkdir(self, path=None):
193 def mkdir(self, path=None):
194 return os.mkdir(self.join(path))
194 return os.mkdir(self.join(path))
195
195
196 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
197 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
198 dir=self.join(dir), text=text)
199 dname, fname = util.split(name)
200 if dir:
201 return fd, os.path.join(dir, fname)
202 else:
203 return fd, fname
204
196 def readdir(self, path=None, stat=None, skip=None):
205 def readdir(self, path=None, stat=None, skip=None):
197 return osutil.listdir(self.join(path), stat, skip)
206 return osutil.listdir(self.join(path), stat, skip)
198
207
199 def readlock(self, path):
208 def readlock(self, path):
200 return util.readlock(self.join(path))
209 return util.readlock(self.join(path))
201
210
202 def rename(self, src, dst):
211 def rename(self, src, dst):
203 return util.rename(self.join(src), self.join(dst))
212 return util.rename(self.join(src), self.join(dst))
204
213
205 def readlink(self, path):
214 def readlink(self, path):
206 return os.readlink(self.join(path))
215 return os.readlink(self.join(path))
207
216
208 def setflags(self, path, l, x):
217 def setflags(self, path, l, x):
209 return util.setflags(self.join(path), l, x)
218 return util.setflags(self.join(path), l, x)
210
219
211 def stat(self, path=None):
220 def stat(self, path=None):
212 return os.stat(self.join(path))
221 return os.stat(self.join(path))
213
222
214 def unlink(self, path=None):
223 def unlink(self, path=None):
215 return util.unlink(self.join(path))
224 return util.unlink(self.join(path))
216
225
217 def utime(self, path=None, t=None):
226 def utime(self, path=None, t=None):
218 return os.utime(self.join(path), t)
227 return os.utime(self.join(path), t)
219
228
220 class vfs(abstractvfs):
229 class vfs(abstractvfs):
221 '''Operate files relative to a base directory
230 '''Operate files relative to a base directory
222
231
223 This class is used to hide the details of COW semantics and
232 This class is used to hide the details of COW semantics and
224 remote file access from higher level code.
233 remote file access from higher level code.
225 '''
234 '''
226 def __init__(self, base, audit=True, expandpath=False, realpath=False):
235 def __init__(self, base, audit=True, expandpath=False, realpath=False):
227 if expandpath:
236 if expandpath:
228 base = util.expandpath(base)
237 base = util.expandpath(base)
229 if realpath:
238 if realpath:
230 base = os.path.realpath(base)
239 base = os.path.realpath(base)
231 self.base = base
240 self.base = base
232 self._setmustaudit(audit)
241 self._setmustaudit(audit)
233 self.createmode = None
242 self.createmode = None
234 self._trustnlink = None
243 self._trustnlink = None
235
244
236 def _getmustaudit(self):
245 def _getmustaudit(self):
237 return self._audit
246 return self._audit
238
247
239 def _setmustaudit(self, onoff):
248 def _setmustaudit(self, onoff):
240 self._audit = onoff
249 self._audit = onoff
241 if onoff:
250 if onoff:
242 self.audit = pathutil.pathauditor(self.base)
251 self.audit = pathutil.pathauditor(self.base)
243 else:
252 else:
244 self.audit = util.always
253 self.audit = util.always
245
254
246 mustaudit = property(_getmustaudit, _setmustaudit)
255 mustaudit = property(_getmustaudit, _setmustaudit)
247
256
248 @util.propertycache
257 @util.propertycache
249 def _cansymlink(self):
258 def _cansymlink(self):
250 return util.checklink(self.base)
259 return util.checklink(self.base)
251
260
252 @util.propertycache
261 @util.propertycache
253 def _chmod(self):
262 def _chmod(self):
254 return util.checkexec(self.base)
263 return util.checkexec(self.base)
255
264
256 def _fixfilemode(self, name):
265 def _fixfilemode(self, name):
257 if self.createmode is None or not self._chmod:
266 if self.createmode is None or not self._chmod:
258 return
267 return
259 os.chmod(name, self.createmode & 0666)
268 os.chmod(name, self.createmode & 0666)
260
269
261 def __call__(self, path, mode="r", text=False, atomictemp=False):
270 def __call__(self, path, mode="r", text=False, atomictemp=False):
262 if self._audit:
271 if self._audit:
263 r = util.checkosfilename(path)
272 r = util.checkosfilename(path)
264 if r:
273 if r:
265 raise util.Abort("%s: %r" % (r, path))
274 raise util.Abort("%s: %r" % (r, path))
266 self.audit(path)
275 self.audit(path)
267 f = self.join(path)
276 f = self.join(path)
268
277
269 if not text and "b" not in mode:
278 if not text and "b" not in mode:
270 mode += "b" # for that other OS
279 mode += "b" # for that other OS
271
280
272 nlink = -1
281 nlink = -1
273 if mode not in ('r', 'rb'):
282 if mode not in ('r', 'rb'):
274 dirname, basename = util.split(f)
283 dirname, basename = util.split(f)
275 # If basename is empty, then the path is malformed because it points
284 # If basename is empty, then the path is malformed because it points
276 # to a directory. Let the posixfile() call below raise IOError.
285 # to a directory. Let the posixfile() call below raise IOError.
277 if basename:
286 if basename:
278 if atomictemp:
287 if atomictemp:
279 util.ensuredirs(dirname, self.createmode)
288 util.ensuredirs(dirname, self.createmode)
280 return util.atomictempfile(f, mode, self.createmode)
289 return util.atomictempfile(f, mode, self.createmode)
281 try:
290 try:
282 if 'w' in mode:
291 if 'w' in mode:
283 util.unlink(f)
292 util.unlink(f)
284 nlink = 0
293 nlink = 0
285 else:
294 else:
286 # nlinks() may behave differently for files on Windows
295 # nlinks() may behave differently for files on Windows
287 # shares if the file is open.
296 # shares if the file is open.
288 fd = util.posixfile(f)
297 fd = util.posixfile(f)
289 nlink = util.nlinks(f)
298 nlink = util.nlinks(f)
290 if nlink < 1:
299 if nlink < 1:
291 nlink = 2 # force mktempcopy (issue1922)
300 nlink = 2 # force mktempcopy (issue1922)
292 fd.close()
301 fd.close()
293 except (OSError, IOError), e:
302 except (OSError, IOError), e:
294 if e.errno != errno.ENOENT:
303 if e.errno != errno.ENOENT:
295 raise
304 raise
296 nlink = 0
305 nlink = 0
297 util.ensuredirs(dirname, self.createmode)
306 util.ensuredirs(dirname, self.createmode)
298 if nlink > 0:
307 if nlink > 0:
299 if self._trustnlink is None:
308 if self._trustnlink is None:
300 self._trustnlink = nlink > 1 or util.checknlink(f)
309 self._trustnlink = nlink > 1 or util.checknlink(f)
301 if nlink > 1 or not self._trustnlink:
310 if nlink > 1 or not self._trustnlink:
302 util.rename(util.mktempcopy(f), f)
311 util.rename(util.mktempcopy(f), f)
303 fp = util.posixfile(f, mode)
312 fp = util.posixfile(f, mode)
304 if nlink == 0:
313 if nlink == 0:
305 self._fixfilemode(f)
314 self._fixfilemode(f)
306 return fp
315 return fp
307
316
308 def symlink(self, src, dst):
317 def symlink(self, src, dst):
309 self.audit(dst)
318 self.audit(dst)
310 linkname = self.join(dst)
319 linkname = self.join(dst)
311 try:
320 try:
312 os.unlink(linkname)
321 os.unlink(linkname)
313 except OSError:
322 except OSError:
314 pass
323 pass
315
324
316 util.ensuredirs(os.path.dirname(linkname), self.createmode)
325 util.ensuredirs(os.path.dirname(linkname), self.createmode)
317
326
318 if self._cansymlink:
327 if self._cansymlink:
319 try:
328 try:
320 os.symlink(src, linkname)
329 os.symlink(src, linkname)
321 except OSError, err:
330 except OSError, err:
322 raise OSError(err.errno, _('could not symlink to %r: %s') %
331 raise OSError(err.errno, _('could not symlink to %r: %s') %
323 (src, err.strerror), linkname)
332 (src, err.strerror), linkname)
324 else:
333 else:
325 self.write(dst, src)
334 self.write(dst, src)
326
335
327 def join(self, path):
336 def join(self, path):
328 if path:
337 if path:
329 return os.path.join(self.base, path)
338 return os.path.join(self.base, path)
330 else:
339 else:
331 return self.base
340 return self.base
332
341
333 opener = vfs
342 opener = vfs
334
343
335 class auditvfs(object):
344 class auditvfs(object):
336 def __init__(self, vfs):
345 def __init__(self, vfs):
337 self.vfs = vfs
346 self.vfs = vfs
338
347
339 def _getmustaudit(self):
348 def _getmustaudit(self):
340 return self.vfs.mustaudit
349 return self.vfs.mustaudit
341
350
342 def _setmustaudit(self, onoff):
351 def _setmustaudit(self, onoff):
343 self.vfs.mustaudit = onoff
352 self.vfs.mustaudit = onoff
344
353
345 mustaudit = property(_getmustaudit, _setmustaudit)
354 mustaudit = property(_getmustaudit, _setmustaudit)
346
355
347 class filtervfs(abstractvfs, auditvfs):
356 class filtervfs(abstractvfs, auditvfs):
348 '''Wrapper vfs for filtering filenames with a function.'''
357 '''Wrapper vfs for filtering filenames with a function.'''
349
358
350 def __init__(self, vfs, filter):
359 def __init__(self, vfs, filter):
351 auditvfs.__init__(self, vfs)
360 auditvfs.__init__(self, vfs)
352 self._filter = filter
361 self._filter = filter
353
362
354 def __call__(self, path, *args, **kwargs):
363 def __call__(self, path, *args, **kwargs):
355 return self.vfs(self._filter(path), *args, **kwargs)
364 return self.vfs(self._filter(path), *args, **kwargs)
356
365
357 def join(self, path):
366 def join(self, path):
358 if path:
367 if path:
359 return self.vfs.join(self._filter(path))
368 return self.vfs.join(self._filter(path))
360 else:
369 else:
361 return self.vfs.join(path)
370 return self.vfs.join(path)
362
371
363 filteropener = filtervfs
372 filteropener = filtervfs
364
373
365 class readonlyvfs(abstractvfs, auditvfs):
374 class readonlyvfs(abstractvfs, auditvfs):
366 '''Wrapper vfs preventing any writing.'''
375 '''Wrapper vfs preventing any writing.'''
367
376
368 def __init__(self, vfs):
377 def __init__(self, vfs):
369 auditvfs.__init__(self, vfs)
378 auditvfs.__init__(self, vfs)
370
379
371 def __call__(self, path, mode='r', *args, **kw):
380 def __call__(self, path, mode='r', *args, **kw):
372 if mode not in ('r', 'rb'):
381 if mode not in ('r', 'rb'):
373 raise util.Abort('this vfs is read only')
382 raise util.Abort('this vfs is read only')
374 return self.vfs(path, mode, *args, **kw)
383 return self.vfs(path, mode, *args, **kw)
375
384
376
385
377 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
386 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
378 '''yield every hg repository under path, always recursively.
387 '''yield every hg repository under path, always recursively.
379 The recurse flag will only control recursion into repo working dirs'''
388 The recurse flag will only control recursion into repo working dirs'''
380 def errhandler(err):
389 def errhandler(err):
381 if err.filename == path:
390 if err.filename == path:
382 raise err
391 raise err
383 samestat = getattr(os.path, 'samestat', None)
392 samestat = getattr(os.path, 'samestat', None)
384 if followsym and samestat is not None:
393 if followsym and samestat is not None:
385 def adddir(dirlst, dirname):
394 def adddir(dirlst, dirname):
386 match = False
395 match = False
387 dirstat = os.stat(dirname)
396 dirstat = os.stat(dirname)
388 for lstdirstat in dirlst:
397 for lstdirstat in dirlst:
389 if samestat(dirstat, lstdirstat):
398 if samestat(dirstat, lstdirstat):
390 match = True
399 match = True
391 break
400 break
392 if not match:
401 if not match:
393 dirlst.append(dirstat)
402 dirlst.append(dirstat)
394 return not match
403 return not match
395 else:
404 else:
396 followsym = False
405 followsym = False
397
406
398 if (seen_dirs is None) and followsym:
407 if (seen_dirs is None) and followsym:
399 seen_dirs = []
408 seen_dirs = []
400 adddir(seen_dirs, path)
409 adddir(seen_dirs, path)
401 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
410 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
402 dirs.sort()
411 dirs.sort()
403 if '.hg' in dirs:
412 if '.hg' in dirs:
404 yield root # found a repository
413 yield root # found a repository
405 qroot = os.path.join(root, '.hg', 'patches')
414 qroot = os.path.join(root, '.hg', 'patches')
406 if os.path.isdir(os.path.join(qroot, '.hg')):
415 if os.path.isdir(os.path.join(qroot, '.hg')):
407 yield qroot # we have a patch queue repo here
416 yield qroot # we have a patch queue repo here
408 if recurse:
417 if recurse:
409 # avoid recursing inside the .hg directory
418 # avoid recursing inside the .hg directory
410 dirs.remove('.hg')
419 dirs.remove('.hg')
411 else:
420 else:
412 dirs[:] = [] # don't descend further
421 dirs[:] = [] # don't descend further
413 elif followsym:
422 elif followsym:
414 newdirs = []
423 newdirs = []
415 for d in dirs:
424 for d in dirs:
416 fname = os.path.join(root, d)
425 fname = os.path.join(root, d)
417 if adddir(seen_dirs, fname):
426 if adddir(seen_dirs, fname):
418 if os.path.islink(fname):
427 if os.path.islink(fname):
419 for hgname in walkrepos(fname, True, seen_dirs):
428 for hgname in walkrepos(fname, True, seen_dirs):
420 yield hgname
429 yield hgname
421 else:
430 else:
422 newdirs.append(d)
431 newdirs.append(d)
423 dirs[:] = newdirs
432 dirs[:] = newdirs
424
433
425 def osrcpath():
434 def osrcpath():
426 '''return default os-specific hgrc search path'''
435 '''return default os-specific hgrc search path'''
427 path = systemrcpath()
436 path = systemrcpath()
428 path.extend(userrcpath())
437 path.extend(userrcpath())
429 path = [os.path.normpath(f) for f in path]
438 path = [os.path.normpath(f) for f in path]
430 return path
439 return path
431
440
432 _rcpath = None
441 _rcpath = None
433
442
434 def rcpath():
443 def rcpath():
435 '''return hgrc search path. if env var HGRCPATH is set, use it.
444 '''return hgrc search path. if env var HGRCPATH is set, use it.
436 for each item in path, if directory, use files ending in .rc,
445 for each item in path, if directory, use files ending in .rc,
437 else use item.
446 else use item.
438 make HGRCPATH empty to only look in .hg/hgrc of current repo.
447 make HGRCPATH empty to only look in .hg/hgrc of current repo.
439 if no HGRCPATH, use default os-specific path.'''
448 if no HGRCPATH, use default os-specific path.'''
440 global _rcpath
449 global _rcpath
441 if _rcpath is None:
450 if _rcpath is None:
442 if 'HGRCPATH' in os.environ:
451 if 'HGRCPATH' in os.environ:
443 _rcpath = []
452 _rcpath = []
444 for p in os.environ['HGRCPATH'].split(os.pathsep):
453 for p in os.environ['HGRCPATH'].split(os.pathsep):
445 if not p:
454 if not p:
446 continue
455 continue
447 p = util.expandpath(p)
456 p = util.expandpath(p)
448 if os.path.isdir(p):
457 if os.path.isdir(p):
449 for f, kind in osutil.listdir(p):
458 for f, kind in osutil.listdir(p):
450 if f.endswith('.rc'):
459 if f.endswith('.rc'):
451 _rcpath.append(os.path.join(p, f))
460 _rcpath.append(os.path.join(p, f))
452 else:
461 else:
453 _rcpath.append(p)
462 _rcpath.append(p)
454 else:
463 else:
455 _rcpath = osrcpath()
464 _rcpath = osrcpath()
456 return _rcpath
465 return _rcpath
457
466
458 def revsingle(repo, revspec, default='.'):
467 def revsingle(repo, revspec, default='.'):
459 if not revspec and revspec != 0:
468 if not revspec and revspec != 0:
460 return repo[default]
469 return repo[default]
461
470
462 l = revrange(repo, [revspec])
471 l = revrange(repo, [revspec])
463 if len(l) < 1:
472 if len(l) < 1:
464 raise util.Abort(_('empty revision set'))
473 raise util.Abort(_('empty revision set'))
465 return repo[l[-1]]
474 return repo[l[-1]]
466
475
467 def revpair(repo, revs):
476 def revpair(repo, revs):
468 if not revs:
477 if not revs:
469 return repo.dirstate.p1(), None
478 return repo.dirstate.p1(), None
470
479
471 l = revrange(repo, revs)
480 l = revrange(repo, revs)
472
481
473 if not l:
482 if not l:
474 first = second = None
483 first = second = None
475 elif l.isascending():
484 elif l.isascending():
476 first = l.min()
485 first = l.min()
477 second = l.max()
486 second = l.max()
478 elif l.isdescending():
487 elif l.isdescending():
479 first = l.max()
488 first = l.max()
480 second = l.min()
489 second = l.min()
481 else:
490 else:
482 l = list(l)
491 l = list(l)
483 first = l[0]
492 first = l[0]
484 second = l[-1]
493 second = l[-1]
485
494
486 if first is None:
495 if first is None:
487 raise util.Abort(_('empty revision range'))
496 raise util.Abort(_('empty revision range'))
488
497
489 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
498 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
490 return repo.lookup(first), None
499 return repo.lookup(first), None
491
500
492 return repo.lookup(first), repo.lookup(second)
501 return repo.lookup(first), repo.lookup(second)
493
502
494 _revrangesep = ':'
503 _revrangesep = ':'
495
504
496 def revrange(repo, revs):
505 def revrange(repo, revs):
497 """Yield revision as strings from a list of revision specifications."""
506 """Yield revision as strings from a list of revision specifications."""
498
507
499 def revfix(repo, val, defval):
508 def revfix(repo, val, defval):
500 if not val and val != 0 and defval is not None:
509 if not val and val != 0 and defval is not None:
501 return defval
510 return defval
502 return repo[val].rev()
511 return repo[val].rev()
503
512
504 seen, l = set(), revset.baseset([])
513 seen, l = set(), revset.baseset([])
505 for spec in revs:
514 for spec in revs:
506 if l and not seen:
515 if l and not seen:
507 seen = set(l)
516 seen = set(l)
508 # attempt to parse old-style ranges first to deal with
517 # attempt to parse old-style ranges first to deal with
509 # things like old-tag which contain query metacharacters
518 # things like old-tag which contain query metacharacters
510 try:
519 try:
511 if isinstance(spec, int):
520 if isinstance(spec, int):
512 seen.add(spec)
521 seen.add(spec)
513 l = l + revset.baseset([spec])
522 l = l + revset.baseset([spec])
514 continue
523 continue
515
524
516 if _revrangesep in spec:
525 if _revrangesep in spec:
517 start, end = spec.split(_revrangesep, 1)
526 start, end = spec.split(_revrangesep, 1)
518 start = revfix(repo, start, 0)
527 start = revfix(repo, start, 0)
519 end = revfix(repo, end, len(repo) - 1)
528 end = revfix(repo, end, len(repo) - 1)
520 if end == nullrev and start < 0:
529 if end == nullrev and start < 0:
521 start = nullrev
530 start = nullrev
522 rangeiter = repo.changelog.revs(start, end)
531 rangeiter = repo.changelog.revs(start, end)
523 if not seen and not l:
532 if not seen and not l:
524 # by far the most common case: revs = ["-1:0"]
533 # by far the most common case: revs = ["-1:0"]
525 l = revset.baseset(rangeiter)
534 l = revset.baseset(rangeiter)
526 # defer syncing seen until next iteration
535 # defer syncing seen until next iteration
527 continue
536 continue
528 newrevs = set(rangeiter)
537 newrevs = set(rangeiter)
529 if seen:
538 if seen:
530 newrevs.difference_update(seen)
539 newrevs.difference_update(seen)
531 seen.update(newrevs)
540 seen.update(newrevs)
532 else:
541 else:
533 seen = newrevs
542 seen = newrevs
534 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
543 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
535 continue
544 continue
536 elif spec and spec in repo: # single unquoted rev
545 elif spec and spec in repo: # single unquoted rev
537 rev = revfix(repo, spec, None)
546 rev = revfix(repo, spec, None)
538 if rev in seen:
547 if rev in seen:
539 continue
548 continue
540 seen.add(rev)
549 seen.add(rev)
541 l = l + revset.baseset([rev])
550 l = l + revset.baseset([rev])
542 continue
551 continue
543 except error.RepoLookupError:
552 except error.RepoLookupError:
544 pass
553 pass
545
554
546 # fall through to new-style queries if old-style fails
555 # fall through to new-style queries if old-style fails
547 m = revset.match(repo.ui, spec, repo)
556 m = revset.match(repo.ui, spec, repo)
548 if seen or l:
557 if seen or l:
549 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
558 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
550 l = l + revset.baseset(dl)
559 l = l + revset.baseset(dl)
551 seen.update(dl)
560 seen.update(dl)
552 else:
561 else:
553 l = m(repo, revset.spanset(repo))
562 l = m(repo, revset.spanset(repo))
554
563
555 return l
564 return l
556
565
557 def expandpats(pats):
566 def expandpats(pats):
558 if not util.expandglobs:
567 if not util.expandglobs:
559 return list(pats)
568 return list(pats)
560 ret = []
569 ret = []
561 for p in pats:
570 for p in pats:
562 kind, name = matchmod._patsplit(p, None)
571 kind, name = matchmod._patsplit(p, None)
563 if kind is None:
572 if kind is None:
564 try:
573 try:
565 globbed = glob.glob(name)
574 globbed = glob.glob(name)
566 except re.error:
575 except re.error:
567 globbed = [name]
576 globbed = [name]
568 if globbed:
577 if globbed:
569 ret.extend(globbed)
578 ret.extend(globbed)
570 continue
579 continue
571 ret.append(p)
580 ret.append(p)
572 return ret
581 return ret
573
582
574 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
583 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
575 if pats == ("",):
584 if pats == ("",):
576 pats = []
585 pats = []
577 if not globbed and default == 'relpath':
586 if not globbed and default == 'relpath':
578 pats = expandpats(pats or [])
587 pats = expandpats(pats or [])
579
588
580 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
589 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
581 default)
590 default)
582 def badfn(f, msg):
591 def badfn(f, msg):
583 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
592 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
584 m.bad = badfn
593 m.bad = badfn
585 return m, pats
594 return m, pats
586
595
587 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
596 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
588 return matchandpats(ctx, pats, opts, globbed, default)[0]
597 return matchandpats(ctx, pats, opts, globbed, default)[0]
589
598
590 def matchall(repo):
599 def matchall(repo):
591 return matchmod.always(repo.root, repo.getcwd())
600 return matchmod.always(repo.root, repo.getcwd())
592
601
593 def matchfiles(repo, files):
602 def matchfiles(repo, files):
594 return matchmod.exact(repo.root, repo.getcwd(), files)
603 return matchmod.exact(repo.root, repo.getcwd(), files)
595
604
596 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
605 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
597 if dry_run is None:
606 if dry_run is None:
598 dry_run = opts.get('dry_run')
607 dry_run = opts.get('dry_run')
599 if similarity is None:
608 if similarity is None:
600 similarity = float(opts.get('similarity') or 0)
609 similarity = float(opts.get('similarity') or 0)
601 # we'd use status here, except handling of symlinks and ignore is tricky
610 # we'd use status here, except handling of symlinks and ignore is tricky
602 m = match(repo[None], pats, opts)
611 m = match(repo[None], pats, opts)
603 rejected = []
612 rejected = []
604 m.bad = lambda x, y: rejected.append(x)
613 m.bad = lambda x, y: rejected.append(x)
605
614
606 added, unknown, deleted, removed = _interestingfiles(repo, m)
615 added, unknown, deleted, removed = _interestingfiles(repo, m)
607
616
608 unknownset = set(unknown)
617 unknownset = set(unknown)
609 toprint = unknownset.copy()
618 toprint = unknownset.copy()
610 toprint.update(deleted)
619 toprint.update(deleted)
611 for abs in sorted(toprint):
620 for abs in sorted(toprint):
612 if repo.ui.verbose or not m.exact(abs):
621 if repo.ui.verbose or not m.exact(abs):
613 rel = m.rel(abs)
622 rel = m.rel(abs)
614 if abs in unknownset:
623 if abs in unknownset:
615 status = _('adding %s\n') % ((pats and rel) or abs)
624 status = _('adding %s\n') % ((pats and rel) or abs)
616 else:
625 else:
617 status = _('removing %s\n') % ((pats and rel) or abs)
626 status = _('removing %s\n') % ((pats and rel) or abs)
618 repo.ui.status(status)
627 repo.ui.status(status)
619
628
620 renames = _findrenames(repo, m, added + unknown, removed + deleted,
629 renames = _findrenames(repo, m, added + unknown, removed + deleted,
621 similarity)
630 similarity)
622
631
623 if not dry_run:
632 if not dry_run:
624 _markchanges(repo, unknown, deleted, renames)
633 _markchanges(repo, unknown, deleted, renames)
625
634
626 for f in rejected:
635 for f in rejected:
627 if f in m.files():
636 if f in m.files():
628 return 1
637 return 1
629 return 0
638 return 0
630
639
631 def marktouched(repo, files, similarity=0.0):
640 def marktouched(repo, files, similarity=0.0):
632 '''Assert that files have somehow been operated upon. files are relative to
641 '''Assert that files have somehow been operated upon. files are relative to
633 the repo root.'''
642 the repo root.'''
634 m = matchfiles(repo, files)
643 m = matchfiles(repo, files)
635 rejected = []
644 rejected = []
636 m.bad = lambda x, y: rejected.append(x)
645 m.bad = lambda x, y: rejected.append(x)
637
646
638 added, unknown, deleted, removed = _interestingfiles(repo, m)
647 added, unknown, deleted, removed = _interestingfiles(repo, m)
639
648
640 if repo.ui.verbose:
649 if repo.ui.verbose:
641 unknownset = set(unknown)
650 unknownset = set(unknown)
642 toprint = unknownset.copy()
651 toprint = unknownset.copy()
643 toprint.update(deleted)
652 toprint.update(deleted)
644 for abs in sorted(toprint):
653 for abs in sorted(toprint):
645 if abs in unknownset:
654 if abs in unknownset:
646 status = _('adding %s\n') % abs
655 status = _('adding %s\n') % abs
647 else:
656 else:
648 status = _('removing %s\n') % abs
657 status = _('removing %s\n') % abs
649 repo.ui.status(status)
658 repo.ui.status(status)
650
659
651 renames = _findrenames(repo, m, added + unknown, removed + deleted,
660 renames = _findrenames(repo, m, added + unknown, removed + deleted,
652 similarity)
661 similarity)
653
662
654 _markchanges(repo, unknown, deleted, renames)
663 _markchanges(repo, unknown, deleted, renames)
655
664
656 for f in rejected:
665 for f in rejected:
657 if f in m.files():
666 if f in m.files():
658 return 1
667 return 1
659 return 0
668 return 0
660
669
661 def _interestingfiles(repo, matcher):
670 def _interestingfiles(repo, matcher):
662 '''Walk dirstate with matcher, looking for files that addremove would care
671 '''Walk dirstate with matcher, looking for files that addremove would care
663 about.
672 about.
664
673
665 This is different from dirstate.status because it doesn't care about
674 This is different from dirstate.status because it doesn't care about
666 whether files are modified or clean.'''
675 whether files are modified or clean.'''
667 added, unknown, deleted, removed = [], [], [], []
676 added, unknown, deleted, removed = [], [], [], []
668 audit_path = pathutil.pathauditor(repo.root)
677 audit_path = pathutil.pathauditor(repo.root)
669
678
670 ctx = repo[None]
679 ctx = repo[None]
671 dirstate = repo.dirstate
680 dirstate = repo.dirstate
672 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
681 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
673 full=False)
682 full=False)
674 for abs, st in walkresults.iteritems():
683 for abs, st in walkresults.iteritems():
675 dstate = dirstate[abs]
684 dstate = dirstate[abs]
676 if dstate == '?' and audit_path.check(abs):
685 if dstate == '?' and audit_path.check(abs):
677 unknown.append(abs)
686 unknown.append(abs)
678 elif dstate != 'r' and not st:
687 elif dstate != 'r' and not st:
679 deleted.append(abs)
688 deleted.append(abs)
680 # for finding renames
689 # for finding renames
681 elif dstate == 'r':
690 elif dstate == 'r':
682 removed.append(abs)
691 removed.append(abs)
683 elif dstate == 'a':
692 elif dstate == 'a':
684 added.append(abs)
693 added.append(abs)
685
694
686 return added, unknown, deleted, removed
695 return added, unknown, deleted, removed
687
696
688 def _findrenames(repo, matcher, added, removed, similarity):
697 def _findrenames(repo, matcher, added, removed, similarity):
689 '''Find renames from removed files to added ones.'''
698 '''Find renames from removed files to added ones.'''
690 renames = {}
699 renames = {}
691 if similarity > 0:
700 if similarity > 0:
692 for old, new, score in similar.findrenames(repo, added, removed,
701 for old, new, score in similar.findrenames(repo, added, removed,
693 similarity):
702 similarity):
694 if (repo.ui.verbose or not matcher.exact(old)
703 if (repo.ui.verbose or not matcher.exact(old)
695 or not matcher.exact(new)):
704 or not matcher.exact(new)):
696 repo.ui.status(_('recording removal of %s as rename to %s '
705 repo.ui.status(_('recording removal of %s as rename to %s '
697 '(%d%% similar)\n') %
706 '(%d%% similar)\n') %
698 (matcher.rel(old), matcher.rel(new),
707 (matcher.rel(old), matcher.rel(new),
699 score * 100))
708 score * 100))
700 renames[new] = old
709 renames[new] = old
701 return renames
710 return renames
702
711
703 def _markchanges(repo, unknown, deleted, renames):
712 def _markchanges(repo, unknown, deleted, renames):
704 '''Marks the files in unknown as added, the files in deleted as removed,
713 '''Marks the files in unknown as added, the files in deleted as removed,
705 and the files in renames as copied.'''
714 and the files in renames as copied.'''
706 wctx = repo[None]
715 wctx = repo[None]
707 wlock = repo.wlock()
716 wlock = repo.wlock()
708 try:
717 try:
709 wctx.forget(deleted)
718 wctx.forget(deleted)
710 wctx.add(unknown)
719 wctx.add(unknown)
711 for new, old in renames.iteritems():
720 for new, old in renames.iteritems():
712 wctx.copy(old, new)
721 wctx.copy(old, new)
713 finally:
722 finally:
714 wlock.release()
723 wlock.release()
715
724
716 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
725 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
717 """Update the dirstate to reflect the intent of copying src to dst. For
726 """Update the dirstate to reflect the intent of copying src to dst. For
718 different reasons it might not end with dst being marked as copied from src.
727 different reasons it might not end with dst being marked as copied from src.
719 """
728 """
720 origsrc = repo.dirstate.copied(src) or src
729 origsrc = repo.dirstate.copied(src) or src
721 if dst == origsrc: # copying back a copy?
730 if dst == origsrc: # copying back a copy?
722 if repo.dirstate[dst] not in 'mn' and not dryrun:
731 if repo.dirstate[dst] not in 'mn' and not dryrun:
723 repo.dirstate.normallookup(dst)
732 repo.dirstate.normallookup(dst)
724 else:
733 else:
725 if repo.dirstate[origsrc] == 'a' and origsrc == src:
734 if repo.dirstate[origsrc] == 'a' and origsrc == src:
726 if not ui.quiet:
735 if not ui.quiet:
727 ui.warn(_("%s has not been committed yet, so no copy "
736 ui.warn(_("%s has not been committed yet, so no copy "
728 "data will be stored for %s.\n")
737 "data will be stored for %s.\n")
729 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
738 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
730 if repo.dirstate[dst] in '?r' and not dryrun:
739 if repo.dirstate[dst] in '?r' and not dryrun:
731 wctx.add([dst])
740 wctx.add([dst])
732 elif not dryrun:
741 elif not dryrun:
733 wctx.copy(origsrc, dst)
742 wctx.copy(origsrc, dst)
734
743
735 def readrequires(opener, supported):
744 def readrequires(opener, supported):
736 '''Reads and parses .hg/requires and checks if all entries found
745 '''Reads and parses .hg/requires and checks if all entries found
737 are in the list of supported features.'''
746 are in the list of supported features.'''
738 requirements = set(opener.read("requires").splitlines())
747 requirements = set(opener.read("requires").splitlines())
739 missings = []
748 missings = []
740 for r in requirements:
749 for r in requirements:
741 if r not in supported:
750 if r not in supported:
742 if not r or not r[0].isalnum():
751 if not r or not r[0].isalnum():
743 raise error.RequirementError(_(".hg/requires file is corrupt"))
752 raise error.RequirementError(_(".hg/requires file is corrupt"))
744 missings.append(r)
753 missings.append(r)
745 missings.sort()
754 missings.sort()
746 if missings:
755 if missings:
747 raise error.RequirementError(
756 raise error.RequirementError(
748 _("repository requires features unknown to this Mercurial: %s")
757 _("repository requires features unknown to this Mercurial: %s")
749 % " ".join(missings),
758 % " ".join(missings),
750 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
759 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
751 " for more information"))
760 " for more information"))
752 return requirements
761 return requirements
753
762
754 class filecachesubentry(object):
763 class filecachesubentry(object):
755 def __init__(self, path, stat):
764 def __init__(self, path, stat):
756 self.path = path
765 self.path = path
757 self.cachestat = None
766 self.cachestat = None
758 self._cacheable = None
767 self._cacheable = None
759
768
760 if stat:
769 if stat:
761 self.cachestat = filecachesubentry.stat(self.path)
770 self.cachestat = filecachesubentry.stat(self.path)
762
771
763 if self.cachestat:
772 if self.cachestat:
764 self._cacheable = self.cachestat.cacheable()
773 self._cacheable = self.cachestat.cacheable()
765 else:
774 else:
766 # None means we don't know yet
775 # None means we don't know yet
767 self._cacheable = None
776 self._cacheable = None
768
777
769 def refresh(self):
778 def refresh(self):
770 if self.cacheable():
779 if self.cacheable():
771 self.cachestat = filecachesubentry.stat(self.path)
780 self.cachestat = filecachesubentry.stat(self.path)
772
781
773 def cacheable(self):
782 def cacheable(self):
774 if self._cacheable is not None:
783 if self._cacheable is not None:
775 return self._cacheable
784 return self._cacheable
776
785
777 # we don't know yet, assume it is for now
786 # we don't know yet, assume it is for now
778 return True
787 return True
779
788
780 def changed(self):
789 def changed(self):
781 # no point in going further if we can't cache it
790 # no point in going further if we can't cache it
782 if not self.cacheable():
791 if not self.cacheable():
783 return True
792 return True
784
793
785 newstat = filecachesubentry.stat(self.path)
794 newstat = filecachesubentry.stat(self.path)
786
795
787 # we may not know if it's cacheable yet, check again now
796 # we may not know if it's cacheable yet, check again now
788 if newstat and self._cacheable is None:
797 if newstat and self._cacheable is None:
789 self._cacheable = newstat.cacheable()
798 self._cacheable = newstat.cacheable()
790
799
791 # check again
800 # check again
792 if not self._cacheable:
801 if not self._cacheable:
793 return True
802 return True
794
803
795 if self.cachestat != newstat:
804 if self.cachestat != newstat:
796 self.cachestat = newstat
805 self.cachestat = newstat
797 return True
806 return True
798 else:
807 else:
799 return False
808 return False
800
809
801 @staticmethod
810 @staticmethod
802 def stat(path):
811 def stat(path):
803 try:
812 try:
804 return util.cachestat(path)
813 return util.cachestat(path)
805 except OSError, e:
814 except OSError, e:
806 if e.errno != errno.ENOENT:
815 if e.errno != errno.ENOENT:
807 raise
816 raise
808
817
809 class filecacheentry(object):
818 class filecacheentry(object):
810 def __init__(self, paths, stat=True):
819 def __init__(self, paths, stat=True):
811 self._entries = []
820 self._entries = []
812 for path in paths:
821 for path in paths:
813 self._entries.append(filecachesubentry(path, stat))
822 self._entries.append(filecachesubentry(path, stat))
814
823
815 def changed(self):
824 def changed(self):
816 '''true if any entry has changed'''
825 '''true if any entry has changed'''
817 for entry in self._entries:
826 for entry in self._entries:
818 if entry.changed():
827 if entry.changed():
819 return True
828 return True
820 return False
829 return False
821
830
822 def refresh(self):
831 def refresh(self):
823 for entry in self._entries:
832 for entry in self._entries:
824 entry.refresh()
833 entry.refresh()
825
834
826 class filecache(object):
835 class filecache(object):
827 '''A property like decorator that tracks files under .hg/ for updates.
836 '''A property like decorator that tracks files under .hg/ for updates.
828
837
829 Records stat info when called in _filecache.
838 Records stat info when called in _filecache.
830
839
831 On subsequent calls, compares old stat info with new info, and recreates the
840 On subsequent calls, compares old stat info with new info, and recreates the
832 object when any of the files changes, updating the new stat info in
841 object when any of the files changes, updating the new stat info in
833 _filecache.
842 _filecache.
834
843
835 Mercurial either atomic renames or appends for files under .hg,
844 Mercurial either atomic renames or appends for files under .hg,
836 so to ensure the cache is reliable we need the filesystem to be able
845 so to ensure the cache is reliable we need the filesystem to be able
837 to tell us if a file has been replaced. If it can't, we fallback to
846 to tell us if a file has been replaced. If it can't, we fallback to
838 recreating the object on every call (essentially the same behaviour as
847 recreating the object on every call (essentially the same behaviour as
839 propertycache).
848 propertycache).
840
849
841 '''
850 '''
842 def __init__(self, *paths):
851 def __init__(self, *paths):
843 self.paths = paths
852 self.paths = paths
844
853
845 def join(self, obj, fname):
854 def join(self, obj, fname):
846 """Used to compute the runtime path of a cached file.
855 """Used to compute the runtime path of a cached file.
847
856
848 Users should subclass filecache and provide their own version of this
857 Users should subclass filecache and provide their own version of this
849 function to call the appropriate join function on 'obj' (an instance
858 function to call the appropriate join function on 'obj' (an instance
850 of the class that its member function was decorated).
859 of the class that its member function was decorated).
851 """
860 """
852 return obj.join(fname)
861 return obj.join(fname)
853
862
854 def __call__(self, func):
863 def __call__(self, func):
855 self.func = func
864 self.func = func
856 self.name = func.__name__
865 self.name = func.__name__
857 return self
866 return self
858
867
859 def __get__(self, obj, type=None):
868 def __get__(self, obj, type=None):
860 # do we need to check if the file changed?
869 # do we need to check if the file changed?
861 if self.name in obj.__dict__:
870 if self.name in obj.__dict__:
862 assert self.name in obj._filecache, self.name
871 assert self.name in obj._filecache, self.name
863 return obj.__dict__[self.name]
872 return obj.__dict__[self.name]
864
873
865 entry = obj._filecache.get(self.name)
874 entry = obj._filecache.get(self.name)
866
875
867 if entry:
876 if entry:
868 if entry.changed():
877 if entry.changed():
869 entry.obj = self.func(obj)
878 entry.obj = self.func(obj)
870 else:
879 else:
871 paths = [self.join(obj, path) for path in self.paths]
880 paths = [self.join(obj, path) for path in self.paths]
872
881
873 # We stat -before- creating the object so our cache doesn't lie if
882 # We stat -before- creating the object so our cache doesn't lie if
874 # a writer modified between the time we read and stat
883 # a writer modified between the time we read and stat
875 entry = filecacheentry(paths, True)
884 entry = filecacheentry(paths, True)
876 entry.obj = self.func(obj)
885 entry.obj = self.func(obj)
877
886
878 obj._filecache[self.name] = entry
887 obj._filecache[self.name] = entry
879
888
880 obj.__dict__[self.name] = entry.obj
889 obj.__dict__[self.name] = entry.obj
881 return entry.obj
890 return entry.obj
882
891
883 def __set__(self, obj, value):
892 def __set__(self, obj, value):
884 if self.name not in obj._filecache:
893 if self.name not in obj._filecache:
885 # we add an entry for the missing value because X in __dict__
894 # we add an entry for the missing value because X in __dict__
886 # implies X in _filecache
895 # implies X in _filecache
887 paths = [self.join(obj, path) for path in self.paths]
896 paths = [self.join(obj, path) for path in self.paths]
888 ce = filecacheentry(paths, False)
897 ce = filecacheentry(paths, False)
889 obj._filecache[self.name] = ce
898 obj._filecache[self.name] = ce
890 else:
899 else:
891 ce = obj._filecache[self.name]
900 ce = obj._filecache[self.name]
892
901
893 ce.obj = value # update cached copy
902 ce.obj = value # update cached copy
894 obj.__dict__[self.name] = value # update copy returned by obj.x
903 obj.__dict__[self.name] = value # update copy returned by obj.x
895
904
896 def __delete__(self, obj):
905 def __delete__(self, obj):
897 try:
906 try:
898 del obj.__dict__[self.name]
907 del obj.__dict__[self.name]
899 except KeyError:
908 except KeyError:
900 raise AttributeError(self.name)
909 raise AttributeError(self.name)
901
910
902 class dirs(object):
911 class dirs(object):
903 '''a multiset of directory names from a dirstate or manifest'''
912 '''a multiset of directory names from a dirstate or manifest'''
904
913
905 def __init__(self, map, skip=None):
914 def __init__(self, map, skip=None):
906 self._dirs = {}
915 self._dirs = {}
907 addpath = self.addpath
916 addpath = self.addpath
908 if util.safehasattr(map, 'iteritems') and skip is not None:
917 if util.safehasattr(map, 'iteritems') and skip is not None:
909 for f, s in map.iteritems():
918 for f, s in map.iteritems():
910 if s[0] != skip:
919 if s[0] != skip:
911 addpath(f)
920 addpath(f)
912 else:
921 else:
913 for f in map:
922 for f in map:
914 addpath(f)
923 addpath(f)
915
924
916 def addpath(self, path):
925 def addpath(self, path):
917 dirs = self._dirs
926 dirs = self._dirs
918 for base in finddirs(path):
927 for base in finddirs(path):
919 if base in dirs:
928 if base in dirs:
920 dirs[base] += 1
929 dirs[base] += 1
921 return
930 return
922 dirs[base] = 1
931 dirs[base] = 1
923
932
924 def delpath(self, path):
933 def delpath(self, path):
925 dirs = self._dirs
934 dirs = self._dirs
926 for base in finddirs(path):
935 for base in finddirs(path):
927 if dirs[base] > 1:
936 if dirs[base] > 1:
928 dirs[base] -= 1
937 dirs[base] -= 1
929 return
938 return
930 del dirs[base]
939 del dirs[base]
931
940
932 def __iter__(self):
941 def __iter__(self):
933 return self._dirs.iterkeys()
942 return self._dirs.iterkeys()
934
943
935 def __contains__(self, d):
944 def __contains__(self, d):
936 return d in self._dirs
945 return d in self._dirs
937
946
938 if util.safehasattr(parsers, 'dirs'):
947 if util.safehasattr(parsers, 'dirs'):
939 dirs = parsers.dirs
948 dirs = parsers.dirs
940
949
941 def finddirs(path):
950 def finddirs(path):
942 pos = path.rfind('/')
951 pos = path.rfind('/')
943 while pos != -1:
952 while pos != -1:
944 yield path[:pos]
953 yield path[:pos]
945 pos = path.rfind('/', 0, pos)
954 pos = path.rfind('/', 0, pos)