##// END OF EJS Templates
vfs: add lexists() in current api...
Chinmay Joshi -
r21563:764b691b default
parent child Browse files
Show More
@@ -1,961 +1,964
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> ctx mapping, preferring entries from ctx1: the
    # update() call overwrites ctx2's entries. The ctx2 subpaths matter
    # when .hgsub has been modified (in ctx2) but not yet committed (ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)
32
32
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        if n not in repo:
            # discovery should not have included the filtered revision;
            # we have to explicitly exclude it until discovery is cleaned up.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
53
53
def checknewlabel(repo, lbl, kind):
    '''Abort if lbl is not acceptable as a new label name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    # A label that parses as an integer would be ambiguous with a revision
    # number; util.Abort is not a ValueError, so it escapes this try block.
    try:
        int(lbl)
        raise util.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
67
67
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
72
72
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
84
84
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # On Windows portability problems are always fatal; elsewhere only an
    # explicit 'abort' setting makes them so.
    abort = os.name == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
97
97
class casecollisionauditor(object):
    '''Warn or abort when a filename would case-fold-collide with a
    tracked file.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Join all tracked names on NUL so a single encoding.lower() call
        # lowers everything at once, then split back into a set.
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # _newfiles keeps us from complaining about case collisions twice
        # if this object is called with the same filename more than once.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
121
121
122 class abstractvfs(object):
122 class abstractvfs(object):
123 """Abstract base class; cannot be instantiated"""
123 """Abstract base class; cannot be instantiated"""
124
124
125 def __init__(self, *args, **kwargs):
125 def __init__(self, *args, **kwargs):
126 '''Prevent instantiation; don't call this from subclasses.'''
126 '''Prevent instantiation; don't call this from subclasses.'''
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
128
128
129 def tryread(self, path):
129 def tryread(self, path):
130 '''gracefully return an empty string for missing files'''
130 '''gracefully return an empty string for missing files'''
131 try:
131 try:
132 return self.read(path)
132 return self.read(path)
133 except IOError, inst:
133 except IOError, inst:
134 if inst.errno != errno.ENOENT:
134 if inst.errno != errno.ENOENT:
135 raise
135 raise
136 return ""
136 return ""
137
137
138 def open(self, path, mode="r", text=False, atomictemp=False):
138 def open(self, path, mode="r", text=False, atomictemp=False):
139 self.open = self.__call__
139 self.open = self.__call__
140 return self.__call__(path, mode, text, atomictemp)
140 return self.__call__(path, mode, text, atomictemp)
141
141
142 def read(self, path):
142 def read(self, path):
143 fp = self(path, 'rb')
143 fp = self(path, 'rb')
144 try:
144 try:
145 return fp.read()
145 return fp.read()
146 finally:
146 finally:
147 fp.close()
147 fp.close()
148
148
149 def write(self, path, data):
149 def write(self, path, data):
150 fp = self(path, 'wb')
150 fp = self(path, 'wb')
151 try:
151 try:
152 return fp.write(data)
152 return fp.write(data)
153 finally:
153 finally:
154 fp.close()
154 fp.close()
155
155
156 def append(self, path, data):
156 def append(self, path, data):
157 fp = self(path, 'ab')
157 fp = self(path, 'ab')
158 try:
158 try:
159 return fp.write(data)
159 return fp.write(data)
160 finally:
160 finally:
161 fp.close()
161 fp.close()
162
162
163 def chmod(self, path, mode):
163 def chmod(self, path, mode):
164 return os.chmod(self.join(path), mode)
164 return os.chmod(self.join(path), mode)
165
165
166 def exists(self, path=None):
166 def exists(self, path=None):
167 return os.path.exists(self.join(path))
167 return os.path.exists(self.join(path))
168
168
169 def fstat(self, fp):
169 def fstat(self, fp):
170 return util.fstat(fp)
170 return util.fstat(fp)
171
171
172 def isdir(self, path=None):
172 def isdir(self, path=None):
173 return os.path.isdir(self.join(path))
173 return os.path.isdir(self.join(path))
174
174
175 def isfile(self, path=None):
175 def isfile(self, path=None):
176 return os.path.isfile(self.join(path))
176 return os.path.isfile(self.join(path))
177
177
178 def islink(self, path=None):
178 def islink(self, path=None):
179 return os.path.islink(self.join(path))
179 return os.path.islink(self.join(path))
180
180
181 def lexists(self, path=None):
182 return os.path.lexists(self.join(path))
183
181 def lstat(self, path=None):
184 def lstat(self, path=None):
182 return os.lstat(self.join(path))
185 return os.lstat(self.join(path))
183
186
184 def makedir(self, path=None, notindexed=True):
187 def makedir(self, path=None, notindexed=True):
185 return util.makedir(self.join(path), notindexed)
188 return util.makedir(self.join(path), notindexed)
186
189
187 def makedirs(self, path=None, mode=None):
190 def makedirs(self, path=None, mode=None):
188 return util.makedirs(self.join(path), mode)
191 return util.makedirs(self.join(path), mode)
189
192
190 def makelock(self, info, path):
193 def makelock(self, info, path):
191 return util.makelock(info, self.join(path))
194 return util.makelock(info, self.join(path))
192
195
193 def mkdir(self, path=None):
196 def mkdir(self, path=None):
194 return os.mkdir(self.join(path))
197 return os.mkdir(self.join(path))
195
198
196 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
199 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
197 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
200 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
198 dir=self.join(dir), text=text)
201 dir=self.join(dir), text=text)
199 dname, fname = util.split(name)
202 dname, fname = util.split(name)
200 if dir:
203 if dir:
201 return fd, os.path.join(dir, fname)
204 return fd, os.path.join(dir, fname)
202 else:
205 else:
203 return fd, fname
206 return fd, fname
204
207
205 def readdir(self, path=None, stat=None, skip=None):
208 def readdir(self, path=None, stat=None, skip=None):
206 return osutil.listdir(self.join(path), stat, skip)
209 return osutil.listdir(self.join(path), stat, skip)
207
210
208 def readlock(self, path):
211 def readlock(self, path):
209 return util.readlock(self.join(path))
212 return util.readlock(self.join(path))
210
213
211 def rename(self, src, dst):
214 def rename(self, src, dst):
212 return util.rename(self.join(src), self.join(dst))
215 return util.rename(self.join(src), self.join(dst))
213
216
214 def readlink(self, path):
217 def readlink(self, path):
215 return os.readlink(self.join(path))
218 return os.readlink(self.join(path))
216
219
217 def setflags(self, path, l, x):
220 def setflags(self, path, l, x):
218 return util.setflags(self.join(path), l, x)
221 return util.setflags(self.join(path), l, x)
219
222
220 def stat(self, path=None):
223 def stat(self, path=None):
221 return os.stat(self.join(path))
224 return os.stat(self.join(path))
222
225
223 def unlink(self, path=None):
226 def unlink(self, path=None):
224 return util.unlink(self.join(path))
227 return util.unlink(self.join(path))
225
228
226 def utime(self, path=None, t=None):
229 def utime(self, path=None, t=None):
227 return os.utime(self.join(path), t)
230 return os.utime(self.join(path), t)
228
231
229 class vfs(abstractvfs):
232 class vfs(abstractvfs):
230 '''Operate files relative to a base directory
233 '''Operate files relative to a base directory
231
234
232 This class is used to hide the details of COW semantics and
235 This class is used to hide the details of COW semantics and
233 remote file access from higher level code.
236 remote file access from higher level code.
234 '''
237 '''
235 def __init__(self, base, audit=True, expandpath=False, realpath=False):
238 def __init__(self, base, audit=True, expandpath=False, realpath=False):
236 if expandpath:
239 if expandpath:
237 base = util.expandpath(base)
240 base = util.expandpath(base)
238 if realpath:
241 if realpath:
239 base = os.path.realpath(base)
242 base = os.path.realpath(base)
240 self.base = base
243 self.base = base
241 self._setmustaudit(audit)
244 self._setmustaudit(audit)
242 self.createmode = None
245 self.createmode = None
243 self._trustnlink = None
246 self._trustnlink = None
244
247
245 def _getmustaudit(self):
248 def _getmustaudit(self):
246 return self._audit
249 return self._audit
247
250
248 def _setmustaudit(self, onoff):
251 def _setmustaudit(self, onoff):
249 self._audit = onoff
252 self._audit = onoff
250 if onoff:
253 if onoff:
251 self.audit = pathutil.pathauditor(self.base)
254 self.audit = pathutil.pathauditor(self.base)
252 else:
255 else:
253 self.audit = util.always
256 self.audit = util.always
254
257
255 mustaudit = property(_getmustaudit, _setmustaudit)
258 mustaudit = property(_getmustaudit, _setmustaudit)
256
259
257 @util.propertycache
260 @util.propertycache
258 def _cansymlink(self):
261 def _cansymlink(self):
259 return util.checklink(self.base)
262 return util.checklink(self.base)
260
263
261 @util.propertycache
264 @util.propertycache
262 def _chmod(self):
265 def _chmod(self):
263 return util.checkexec(self.base)
266 return util.checkexec(self.base)
264
267
265 def _fixfilemode(self, name):
268 def _fixfilemode(self, name):
266 if self.createmode is None or not self._chmod:
269 if self.createmode is None or not self._chmod:
267 return
270 return
268 os.chmod(name, self.createmode & 0666)
271 os.chmod(name, self.createmode & 0666)
269
272
270 def __call__(self, path, mode="r", text=False, atomictemp=False):
273 def __call__(self, path, mode="r", text=False, atomictemp=False):
271 if self._audit:
274 if self._audit:
272 r = util.checkosfilename(path)
275 r = util.checkosfilename(path)
273 if r:
276 if r:
274 raise util.Abort("%s: %r" % (r, path))
277 raise util.Abort("%s: %r" % (r, path))
275 self.audit(path)
278 self.audit(path)
276 f = self.join(path)
279 f = self.join(path)
277
280
278 if not text and "b" not in mode:
281 if not text and "b" not in mode:
279 mode += "b" # for that other OS
282 mode += "b" # for that other OS
280
283
281 nlink = -1
284 nlink = -1
282 if mode not in ('r', 'rb'):
285 if mode not in ('r', 'rb'):
283 dirname, basename = util.split(f)
286 dirname, basename = util.split(f)
284 # If basename is empty, then the path is malformed because it points
287 # If basename is empty, then the path is malformed because it points
285 # to a directory. Let the posixfile() call below raise IOError.
288 # to a directory. Let the posixfile() call below raise IOError.
286 if basename:
289 if basename:
287 if atomictemp:
290 if atomictemp:
288 util.ensuredirs(dirname, self.createmode)
291 util.ensuredirs(dirname, self.createmode)
289 return util.atomictempfile(f, mode, self.createmode)
292 return util.atomictempfile(f, mode, self.createmode)
290 try:
293 try:
291 if 'w' in mode:
294 if 'w' in mode:
292 util.unlink(f)
295 util.unlink(f)
293 nlink = 0
296 nlink = 0
294 else:
297 else:
295 # nlinks() may behave differently for files on Windows
298 # nlinks() may behave differently for files on Windows
296 # shares if the file is open.
299 # shares if the file is open.
297 fd = util.posixfile(f)
300 fd = util.posixfile(f)
298 nlink = util.nlinks(f)
301 nlink = util.nlinks(f)
299 if nlink < 1:
302 if nlink < 1:
300 nlink = 2 # force mktempcopy (issue1922)
303 nlink = 2 # force mktempcopy (issue1922)
301 fd.close()
304 fd.close()
302 except (OSError, IOError), e:
305 except (OSError, IOError), e:
303 if e.errno != errno.ENOENT:
306 if e.errno != errno.ENOENT:
304 raise
307 raise
305 nlink = 0
308 nlink = 0
306 util.ensuredirs(dirname, self.createmode)
309 util.ensuredirs(dirname, self.createmode)
307 if nlink > 0:
310 if nlink > 0:
308 if self._trustnlink is None:
311 if self._trustnlink is None:
309 self._trustnlink = nlink > 1 or util.checknlink(f)
312 self._trustnlink = nlink > 1 or util.checknlink(f)
310 if nlink > 1 or not self._trustnlink:
313 if nlink > 1 or not self._trustnlink:
311 util.rename(util.mktempcopy(f), f)
314 util.rename(util.mktempcopy(f), f)
312 fp = util.posixfile(f, mode)
315 fp = util.posixfile(f, mode)
313 if nlink == 0:
316 if nlink == 0:
314 self._fixfilemode(f)
317 self._fixfilemode(f)
315 return fp
318 return fp
316
319
317 def symlink(self, src, dst):
320 def symlink(self, src, dst):
318 self.audit(dst)
321 self.audit(dst)
319 linkname = self.join(dst)
322 linkname = self.join(dst)
320 try:
323 try:
321 os.unlink(linkname)
324 os.unlink(linkname)
322 except OSError:
325 except OSError:
323 pass
326 pass
324
327
325 util.ensuredirs(os.path.dirname(linkname), self.createmode)
328 util.ensuredirs(os.path.dirname(linkname), self.createmode)
326
329
327 if self._cansymlink:
330 if self._cansymlink:
328 try:
331 try:
329 os.symlink(src, linkname)
332 os.symlink(src, linkname)
330 except OSError, err:
333 except OSError, err:
331 raise OSError(err.errno, _('could not symlink to %r: %s') %
334 raise OSError(err.errno, _('could not symlink to %r: %s') %
332 (src, err.strerror), linkname)
335 (src, err.strerror), linkname)
333 else:
336 else:
334 self.write(dst, src)
337 self.write(dst, src)
335
338
336 def join(self, path):
339 def join(self, path):
337 if path:
340 if path:
338 return os.path.join(self.base, path)
341 return os.path.join(self.base, path)
339 else:
342 else:
340 return self.base
343 return self.base
341
344
342 opener = vfs
345 opener = vfs
343
346
class auditvfs(object):
    '''Mixin delegating the mustaudit flag to a wrapped vfs instance.'''

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
355
358
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        # only filter real paths; pass None/'' through untouched
        if path:
            return self.vfs.join(self._filter(path))
        else:
            return self.vfs.join(path)

filteropener = filtervfs
373
376
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes reach the wrapped vfs
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
384
387
385
388
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the root path itself
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and remember dirname's stat) if not seen before
            dirstat = os.stat(dirname)
            if any(samestat(dirstat, seenstat) for seenstat in dirlst):
                return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect symlink cycles, so do not follow
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
433
436
def osrcpath():
    '''return default os-specific hgrc search path'''
    # system-wide entries come first so per-user settings override them
    path = systemrcpath()
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]

# lazily-filled cache for rcpath(); None means "not computed yet"
_rcpath = None
442
445
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            # a directory entry contributes every *.rc file inside it
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
466
469
def revsingle(repo, revspec, default='.'):
    '''Resolve revspec to a single changectx, falling back to default when
    revspec is empty (but not when it is the integer 0).'''
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if len(matched) < 1:
        raise util.Abort(_('empty revision set'))
    # use the last match so e.g. a range resolves to its end point
    return repo[matched[-1]]
475
478
def revpair(repo, revs):
    '''Resolve revs to a pair of nodes (first, second); second is None when
    a single revision was specified.'''
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # Pick the endpoints without materializing the whole set when the
    # smartset knows its own ordering.
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        l = list(l)
        first = l[0]
        second = l[-1]

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a single non-range spec that resolved to one revision has no "second"
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)

# separator for old-style revision ranges ("a:b")
_revrangesep = ':'
504
507
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec (but not 0) falls back to defval when one is given
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, l = set(), revset.baseset([])
    for spec in revs:
        # sync seen with l lazily: only once a second spec shows up
        if l and not seen:
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo, revset.spanset(repo))

    return l
565
568
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit kind: prefix - pass the pattern through untouched
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # glob matched nothing: keep the original pattern
            expanded.append(kindpat)
    return expanded
584
587
def matchandpats(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.

    ctx is the changectx to match against; pats is a sequence of patterns
    (defaults to all files) and opts may carry 'include'/'exclude' lists.
    '''
    # avoid mutable default arguments: normalize to fresh values per call
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default)
    def badfn(f, msg):
        # report files that could not be matched through the repo's ui
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    return m, pats
599
602
def match(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.

    Thin wrapper around matchandpats() that discards the normalized
    pattern list.  Uses None sentinels instead of mutable defaults.
    '''
    # normalize here so the delegate never sees None
    return matchandpats(ctx, pats or [], opts or {}, globbed, default)[0]
603
606
def matchall(repo):
    '''Return a matcher that efficiently matches every file in the repo.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
607
610
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files)
611
614
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule addition of unknown files and removal of missing files.

    dry_run and similarity default to the corresponding values in opts
    when not given explicitly.  Returns 1 if any file explicitly named
    by the patterns could not be walked, 0 otherwise.
    '''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    rejected = []
    # collect bad files instead of warning; failures are reported at the end
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    # report the files about to be added (unknown) or removed (deleted),
    # skipping exact matches unless verbose
    unknownset = set(unknown)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            rel = m.rel(abs)
            if abs in unknownset:
                status = _('adding %s\n') % ((pats and rel) or abs)
            else:
                status = _('removing %s\n') % ((pats and rel) or abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown, deleted, renames)

    # fail if a file named explicitly on the command line was rejected
    for f in rejected:
        if f in m.files():
            return 1
    return 0
646
649
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the given files could not be walked, 0 otherwise.
    '''
    m = matchfiles(repo, files)
    rejected = []
    # collect bad files instead of warning; failures are reported at the end
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # unlike addremove, individual adds/removes are only reported
        # when verbose
        unknownset = set(unknown)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # note: no dry_run support here, changes are always recorded
    _markchanges(repo, unknown, deleted, renames)

    # fail if one of the requested files could not be walked
    for f in rejected:
        if f in m.files():
            return 1
    return 0
676
679
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns four lists: (added, unknown, deleted, removed).'''
    added, unknown, deleted, removed = [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the one-letter dirstate state of the file
        # ('?' untracked, 'r' removed, 'a' added, ...)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked file with a legal path: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk: candidate for removal
            deleted.append(abs)
        # for finding renames
        elif dstate == 'r':
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed
703
706
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new name -> old name.  Nothing is detected
    when similarity is zero or negative.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose
            or not (matcher.exact(old) and matcher.exact(new))):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
718
721
def _markchanges(repo, unknown, deleted, renames):
    '''Record pending changes in the dirstate: files in unknown are marked
    as added, files in deleted as removed, and each new->old pair in
    renames as a copy.'''
    workingctx = repo[None]
    # all dirstate mutations happen under the working-directory lock
    wlock = repo.wlock()
    try:
        workingctx.forget(deleted)
        workingctx.add(unknown)
        for dst, src in renames.iteritems():
            workingctx.copy(src, dst)
    finally:
        wlock.release()
731
734
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src is itself the result of a copy, chase it back to the
    # original source so copy chains collapse
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # dst is not in a normal/merged state; put it back to normal
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed: there is no
            # revision to attach copy metadata to, so just add dst
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
750
753
def readrequires(opener, supported):
    '''Read and parse .hg/requires, checking every entry against the set of
    supported features.

    Returns the set of requirements on success; raises RequirementError if
    the file is corrupt or lists features this Mercurial does not know.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an entry that is empty or does not start with an alphanumeric
        # character indicates a corrupt requires file
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
769
772
class filecachesubentry(object):
    '''Tracks the stat state of a single path for filecacheentry.

    cachestat holds the last recorded stat info (None if the file was
    missing or never statted); _cacheable records whether the filesystem
    can reliably report changes for this path (None means unknown).
    '''
    def __init__(self, path, stat):
        # path: the file to watch; stat: whether to record stat info now
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only when stat info is usable at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''Return True if the file changed (or we cannot tell); records
        the new stat info as a side effect when it did change.'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        '''Return util.cachestat for path, or None if the file is missing.'''
        try:
            return util.cachestat(path)
        except OSError, e:
            # a missing file is not an error, just "no stat info"
            if e.errno != errno.ENOENT:
                raise
824
827
class filecacheentry(object):
    '''Stat-tracking cache entry covering several paths at once, built
    from one filecachesubentry per path.'''

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        # refresh the recorded stat info of every tracked path
        for e in self._entries:
            e.refresh()
841
844
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths: file names (resolved at runtime through join()) to watch
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator entry point: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # a value in obj.__dict__ shadows this descriptor until deleted,
        # so hits after the first access skip the stat check entirely
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # a watched file changed: recompute the cached object
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop the shadowing instance attribute so the next __get__
        # goes through the stat check again
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
917
920
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # map is either a plain iterable of paths, or (when skip is given)
        # a dict of path -> dirstate entry; entries whose state equals
        # skip are left out (e.g. skip='r' to ignore removed files)
        self._dirs = {}
        addpath = self.addpath
        if util.safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        # bump the count of each ancestor directory of path, deepest
        # first; stop at the first ancestor already present, whose own
        # ancestors were accounted for when it was first added
        dirs = self._dirs
        for base in finddirs(path):
            if base in dirs:
                dirs[base] += 1
                return
            dirs[base] = 1

    def delpath(self, path):
        # mirror of addpath: decrement counts deepest first, and delete
        # (then stop recursing upward from) a directory reaching zero
        dirs = self._dirs
        for base in finddirs(path):
            if dirs[base] > 1:
                dirs[base] -= 1
                return
            del dirs[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
953
956
# prefer the C implementation of the dirs multiset from the parsers
# extension module when it is available
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
956
959
def finddirs(path):
    '''Yield each ancestor directory of a '/'-separated path, deepest
    first (e.g. 'a/b/c' yields 'a/b' then 'a').'''
    end = len(path)
    while True:
        end = path.rfind('/', 0, end)
        if end == -1:
            return
        yield path[:end]
General Comments 0
You need to be logged in to leave comments. Login now