##// END OF EJS Templates
vfs: add listdir for os.listdir in vfs...
Chinmay Joshi -
r21799:dfacdd6a default
parent child Browse files
Show More
@@ -1,967 +1,970 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
def itersubrepos(ctx1, ctx2):
    """Yield (subpath, subrepo) pairs for every subrepo in ctx1 or ctx2.

    Subpaths present in both contexts resolve against ctx1.  The
    subpaths from ctx2 matter when the .hgsub file has been modified
    (in ctx2) but not yet committed (in ctx1).
    """
    # Build a subpath -> owning-context map, letting ctx1 win on ties.
    owner = {}
    for path in ctx2.substate:
        owner[path] = ctx2
    for path in ctx1.substate:
        owner[path] = ctx1
    for path in sorted(owner):
        yield path, owner[path].sub(path)
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not acceptable as a new label name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in ':\0\n\r':
        if forbidden in lbl:
            raise util.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        # not an integer: the name is fine
        return
    raise util.Abort(_("cannot use an integer as a name"))
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    problem = util.checkwinfilename(f)
    if not problem:
        return
    problem = "%s: %r" % (problem, f)
    if abort:
        raise util.Abort(problem)
    ui.warn(_("warning: %s\n") % problem)
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames', 'warn')
    lowered = raw.lower()
    parsed = util.parsebool(raw)
    # Windows cannot represent the offending names at all, so abort there.
    abort = os.name == 'nt' or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
class casecollisionauditor(object):
    """Warn or abort when an added file would case-fold-collide with a
    tracked one."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked name in one pass: joining on NUL and
        # splitting afterwards is much cheaper than lowering one name at
        # a time.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
class abstractvfs(object):
    """Abstract base class; cannot be instantiated

    Concrete subclasses must provide __call__ (open a file relative to
    the vfs) and join (map a vfs-relative name to a real path); every
    convenience method below is expressed in terms of those two.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def open(self, path, mode="r", text=False, atomictemp=False):
        # Rebind open to __call__ on first use so subsequent calls skip
        # this shim entirely.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp)

    def read(self, path):
        '''Return the entire binary content of path.'''
        f = self(path, 'rb')
        try:
            return f.read()
        finally:
            f.close()

    def write(self, path, data):
        '''Replace the content of path with data (binary).'''
        f = self(path, 'wb')
        try:
            return f.write(data)
        finally:
            f.close()

    def append(self, path, data):
        '''Append data (binary) to path.'''
        f = self(path, 'ab')
        try:
            return f.write(data)
        finally:
            f.close()

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''Create a temp file under this vfs; return (fd, vfs-relative name).'''
        fd, fullname = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                        dir=self.join(dir), text=text)
        fname = util.split(fullname)[1]
        # report the name relative to this vfs, mirroring the dir argument
        if dir:
            return fd, os.path.join(dir, fname)
        return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # normalize the base directory up front so join() stays cheap
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # honor createmode only where the filesystem supports exec bits
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        '''Open path (relative to base), auditing it first when enabled.'''
        if self._audit:
            problem = util.checkosfilename(path)
            if problem:
                raise util.Abort("%s: %r" % (problem, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode)
                if nlink > 0:
                    # break hardlinks before writing unless nlink counts
                    # are known to be trustworthy on this filesystem
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        '''Create dst as a symlink to src, or as a plain file holding src
        where symlinks are unsupported.'''
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path):
        if path:
            return os.path.join(self.base, path)
        return self.base

opener = vfs
class auditvfs(object):
    """Mixin that forwards the mustaudit flag to a wrapped vfs."""

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, value):
        self.vfs.mustaudit = value

    mustaudit = property(_getmustaudit, _setmustaudit)
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # map the name through the filter before delegating
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        mapped = self._filter(path) if path else path
        return self.vfs.join(mapped)

filteropener = filtervfs
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # reject every mode that could modify the underlying file
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only errors on the root itself are fatal; unreadable
        # subdirectories are silently skipped by os.walk
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and remember dirname's stat) iff dirname was
            # not visited before; guards against symlink cycles
            dirstat = os.stat(dirname)
            for cached in dirlst:
                if samestat(dirstat, cached):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect cycles, so never follow links
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
def osrcpath():
    '''return default os-specific hgrc search path'''
    # system-wide entries first, then the per-user ones
    candidates = systemrcpath() + userrcpath()
    return [os.path.normpath(entry) for entry in candidates]
# cached result of rcpath(); computed lazily on first call
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        entry = util.expandpath(entry)
        if os.path.isdir(entry):
            # a directory entry contributes every *.rc file inside it
            for f, kind in osutil.listdir(entry):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(entry, f))
        else:
            _rcpath.append(entry)
    return _rcpath
def revsingle(repo, revspec, default='.'):
    """Return the changectx for the last revision matched by revspec.

    Falls back to repo[default] when revspec is empty — but not when it
    is the integer 0, which is a valid revision.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if len(matched) < 1:
        raise util.Abort(_('empty revision set'))
    return repo[matched[-1]]
def revpair(repo, revs):
    """Resolve revision specs to a (first, second) pair of nodes.

    second is None when the specs name a single revision; with no specs
    at all, the working directory's first parent is returned.
    """
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    if not resolved:
        first = second = None
    elif resolved.isascending():
        first = resolved.min()
        second = resolved.max()
    elif resolved.isdescending():
        first = resolved.max()
        second = resolved.min()
    else:
        # unordered set: fall back to positional endpoints
        resolved = list(resolved)
        first = resolved[0]
        second = resolved[-1]

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a lone spec with no range separator names a single revision
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)

# separator for old-style "start:end" revision ranges
_revrangesep = ':'
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec component (but not 0) means the supplied default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, result = set(), revset.baseset([])
    for spec in revs:
        # 'seen' mirrors 'result' lazily; sync it at the top of each
        # iteration if a previous pass deferred the update
        if result and not seen:
            seen = set(result)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                result = result + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not result:
                    # by far the most common case: revs = ["-1:0"]
                    result = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                result = result + revset.baseset(
                    sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                result = result + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or result:
            fresh = [r for r in m(repo, revset.spanset(repo))
                     if r not in seen]
            result = result + revset.baseset(fresh)
            seen.update(fresh)
        else:
            result = m(repo, revset.spanset(repo))

    return result
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # bare pattern: try to glob it ourselves
            try:
                matches = glob.glob(pat)
            except re.error:
                # treat un-globbable patterns literally
                matches = [pat]
            if matches:
                expanded.extend(matches)
                continue
        # kinded patterns (and bare patterns that matched nothing)
        # are passed through unchanged
        expanded.append(kindpat)
    return expanded
590
593
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'), default)

    def badfn(f, msg):
        # report bad matches relative to the cwd, via the repo ui
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))

    m.bad = badfn
    return m, pats
605
608
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return m
609
612
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
613
616
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
617
620
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal,
    recording sufficiently similar add/remove pairs as renames.

    Returns 1 if any explicitly matched file was rejected, 0 otherwise.
    '''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    rejected = []
    m.bad = lambda f, msg: rejected.append(f)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    unknownset = set(unknown)
    for f in sorted(unknownset.union(deleted)):
        # exact matches are only reported in verbose mode
        if not repo.ui.verbose and m.exact(f):
            continue
        rel = m.rel(f)
        if f in unknownset:
            status = _('adding %s\n') % ((pats and rel) or f)
        else:
            status = _('removing %s\n') % ((pats and rel) or f)
        repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
652
655
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the given files was rejected, 0 otherwise.
    '''
    m = matchfiles(repo, files)
    rejected = []
    m.bad = lambda f, msg: rejected.append(f)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown)
        for f in sorted(unknownset.union(deleted)):
            if f in unknownset:
                status = _('adding %s\n') % f
            else:
                status = _('removing %s\n') % f
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown, deleted, renames)

    if any(f in m.files() for f in rejected):
        return 1
    return 0
682
685
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a tuple of lists: (added, unknown, deleted, removed).
    '''
    added, unknown, deleted, removed = [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for fn, st in walkresults.iteritems():
        state = dirstate[fn]
        if state == '?' and audit_path.check(fn):
            # untracked and safe to track
            unknown.append(fn)
        elif state != 'r' and not st:
            # tracked but gone from disk
            deleted.append(fn)
        # for finding renames
        elif state == 'r':
            removed.append(fn)
        elif state == 'a':
            added.append(fn)

    return added, unknown, deleted, removed
709
712
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a mapping of {new name: old name} for each detected rename.
    '''
    renames = {}
    if similarity > 0:
        for src, dst, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            # stay quiet about exact matches unless verbose is on
            exactpair = matcher.exact(src) and matcher.exact(dst)
            if repo.ui.verbose or not exactpair:
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(src), matcher.rel(dst),
                                score * 100))
            renames[dst] = src
    return renames
724
727
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the working-dir lock across all dirstate mutations
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
    finally:
        wlock.release()
737
740
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return

    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source was only just added: there is no committed copy source
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
756
759
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Raises error.RequirementError if the file is corrupt or lists a
    feature this Mercurial does not support; returns the requirement set
    otherwise.
    '''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
775
778
class filecachesubentry(object):
    """Stat-based change tracking for a single file path.

    Records the file's stat info (when requested) and can later report
    whether the file appears to have changed on disk.  Files whose stat
    info cannot reliably detect rewrites are always reported as changed.
    """

    def __init__(self, path, stat):
        # path: file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        # None means "cacheability not determined yet"
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        """Re-stat the file, remembering the new stat info."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """True if stat info can reliably detect changes to this file."""
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """True if the file changed on disk since the stat was recorded."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if it does not exist.

        Any OSError other than ENOENT is propagated.  Note: the original
        used the Python-2-only 'except OSError, e' spelling; 'as' is
        equivalent and also valid on Python 2.6+.
        """
        try:
            return util.cachestat(path)
        except OSError as e:
            # a missing file simply means "no stat info", not an error
            if e.errno != errno.ENOENT:
                raise
830
833
class filecacheentry(object):
    """Aggregate change tracking over several file paths."""

    def __init__(self, paths, stat=True):
        # one sub-entry per watched path
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        for sub in self._entries:
            sub.refresh()
847
850
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # fast path: value already cached in the instance dict
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # recompute only if any tracked file changed on disk
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, p) for p in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)
            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        try:
            ce = obj._filecache[self.name]
        except KeyError:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, p) for p in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
923
926
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if skip is not None and util.safehasattr(map, 'iteritems'):
            # dirstate-style mapping: skip entries in the given state
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            # plain iterable of file names
            for f in map:
                addpath(f)

    def addpath(self, path):
        # invariant: if a directory is present, all its ancestors are too,
        # so bumping the first ancestor already present is sufficient
        counts = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counts:
                counts[ancestor] += 1
                return
            counts[ancestor] = 1

    def delpath(self, path):
        # mirror image of addpath: drop ancestors whose count hits zero
        counts = self._dirs
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                counts[ancestor] -= 1
                return
            del counts[ancestor]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
959
962
# prefer the C implementation of the dirs multiset when the parsers
# extension module provides one
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
962
965
def finddirs(path):
    '''Yield each ancestor directory of a slash-separated path,
    deepest first (e.g. 'a/b/c' -> 'a/b', 'a').'''
    cut = path.rfind('/')
    while cut != -1:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
General Comments 0
You need to be logged in to leave comments. Login now