addremove: don't do full walks...
Siddharth Agarwal
r19655:1d07bf10 default
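The substance of this revision is the new `full=False` argument passed to `dirstate.walk()` in `_interestingfiles()` (new lines 758-759 in the diff below). A full walk returns an entry for every matched file; passing `full=False` lets the walk skip known-clean tracked files, which is sufficient here because `_interestingfiles()` only classifies unknown (`'?'`), added (`'a'`), removed (`'r'`), and missing entries. The sketch below is illustrative only: the helper names (`walk`, `interesting`, `on_disk`) and their signatures are invented for this example and are not Mercurial APIs; it merely shows why skipping clean, present files cannot change the addremove-style classification.

```python
# Illustrative sketch only -- not Mercurial code. It mimics the idea behind
# this revision: if the caller only cares about unknown/added/removed/missing
# entries, the walk may omit known-clean files (full=False) without changing
# the result.

def walk(dirstate, on_disk, full=True):
    """Yield (path, exists_on_disk) pairs; skip known-clean files if not full."""
    for path, state in sorted(dirstate.items()):
        exists = path in on_disk
        if not full and state == 'n' and exists:
            continue  # clean tracked file: irrelevant to addremove
        yield path, exists
    for path in sorted(on_disk - set(dirstate)):
        yield path, True  # untracked files are always interesting

def interesting(dirstate, on_disk, full):
    added, unknown, deleted, removed = [], [], [], []
    for path, exists in walk(dirstate, on_disk, full=full):
        state = dirstate.get(path, '?')
        if state == '?':
            unknown.append(path)
        elif state != 'r' and not exists:
            deleted.append(path)
        elif state == 'r':
            removed.append(path)
        elif state == 'a':
            added.append(path)
    return added, unknown, deleted, removed

if __name__ == '__main__':
    dirstate = {'a.txt': 'n', 'b.txt': 'a', 'c.txt': 'r', 'gone.txt': 'n'}
    on_disk = {'a.txt', 'b.txt', 'new.txt'}
    # The classification is identical with and without the full walk.
    assert interesting(dirstate, on_disk, True) == interesting(dirstate, on_disk, False)
    print(interesting(dirstate, on_disk, False))
```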
@@ -1,1007 +1,1008 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import match as matchmod
11 import match as matchmod
12 import os, errno, re, stat, glob
12 import os, errno, re, stat, glob
13
13
14 if os.name == 'nt':
14 if os.name == 'nt':
15 import scmwindows as scmplatform
15 import scmwindows as scmplatform
16 else:
16 else:
17 import scmposix as scmplatform
17 import scmposix as scmplatform
18
18
19 systemrcpath = scmplatform.systemrcpath
19 systemrcpath = scmplatform.systemrcpath
20 userrcpath = scmplatform.userrcpath
20 userrcpath = scmplatform.userrcpath
21
21
22 def nochangesfound(ui, repo, excluded=None):
22 def nochangesfound(ui, repo, excluded=None):
23 '''Report no changes for push/pull, excluded is None or a list of
23 '''Report no changes for push/pull, excluded is None or a list of
24 nodes excluded from the push/pull.
24 nodes excluded from the push/pull.
25 '''
25 '''
26 secretlist = []
26 secretlist = []
27 if excluded:
27 if excluded:
28 for n in excluded:
28 for n in excluded:
29 if n not in repo:
29 if n not in repo:
30 # discovery should not have included the filtered revision,
30 # discovery should not have included the filtered revision,
31 # we have to explicitly exclude it until discovery is cleanup.
31 # we have to explicitly exclude it until discovery is cleanup.
32 continue
32 continue
33 ctx = repo[n]
33 ctx = repo[n]
34 if ctx.phase() >= phases.secret and not ctx.extinct():
34 if ctx.phase() >= phases.secret and not ctx.extinct():
35 secretlist.append(n)
35 secretlist.append(n)
36
36
37 if secretlist:
37 if secretlist:
38 ui.status(_("no changes found (ignored %d secret changesets)\n")
38 ui.status(_("no changes found (ignored %d secret changesets)\n")
39 % len(secretlist))
39 % len(secretlist))
40 else:
40 else:
41 ui.status(_("no changes found\n"))
41 ui.status(_("no changes found\n"))
42
42
43 def checknewlabel(repo, lbl, kind):
43 def checknewlabel(repo, lbl, kind):
44 # Do not use the "kind" parameter in ui output.
44 # Do not use the "kind" parameter in ui output.
45 # It makes strings difficult to translate.
45 # It makes strings difficult to translate.
46 if lbl in ['tip', '.', 'null']:
46 if lbl in ['tip', '.', 'null']:
47 raise util.Abort(_("the name '%s' is reserved") % lbl)
47 raise util.Abort(_("the name '%s' is reserved") % lbl)
48 for c in (':', '\0', '\n', '\r'):
48 for c in (':', '\0', '\n', '\r'):
49 if c in lbl:
49 if c in lbl:
50 raise util.Abort(_("%r cannot be used in a name") % c)
50 raise util.Abort(_("%r cannot be used in a name") % c)
51 try:
51 try:
52 int(lbl)
52 int(lbl)
53 raise util.Abort(_("cannot use an integer as a name"))
53 raise util.Abort(_("cannot use an integer as a name"))
54 except ValueError:
54 except ValueError:
55 pass
55 pass
56
56
57 def checkfilename(f):
57 def checkfilename(f):
58 '''Check that the filename f is an acceptable filename for a tracked file'''
58 '''Check that the filename f is an acceptable filename for a tracked file'''
59 if '\r' in f or '\n' in f:
59 if '\r' in f or '\n' in f:
60 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
60 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
61
61
62 def checkportable(ui, f):
62 def checkportable(ui, f):
63 '''Check if filename f is portable and warn or abort depending on config'''
63 '''Check if filename f is portable and warn or abort depending on config'''
64 checkfilename(f)
64 checkfilename(f)
65 abort, warn = checkportabilityalert(ui)
65 abort, warn = checkportabilityalert(ui)
66 if abort or warn:
66 if abort or warn:
67 msg = util.checkwinfilename(f)
67 msg = util.checkwinfilename(f)
68 if msg:
68 if msg:
69 msg = "%s: %r" % (msg, f)
69 msg = "%s: %r" % (msg, f)
70 if abort:
70 if abort:
71 raise util.Abort(msg)
71 raise util.Abort(msg)
72 ui.warn(_("warning: %s\n") % msg)
72 ui.warn(_("warning: %s\n") % msg)
73
73
74 def checkportabilityalert(ui):
74 def checkportabilityalert(ui):
75 '''check if the user's config requests nothing, a warning, or abort for
75 '''check if the user's config requests nothing, a warning, or abort for
76 non-portable filenames'''
76 non-portable filenames'''
77 val = ui.config('ui', 'portablefilenames', 'warn')
77 val = ui.config('ui', 'portablefilenames', 'warn')
78 lval = val.lower()
78 lval = val.lower()
79 bval = util.parsebool(val)
79 bval = util.parsebool(val)
80 abort = os.name == 'nt' or lval == 'abort'
80 abort = os.name == 'nt' or lval == 'abort'
81 warn = bval or lval == 'warn'
81 warn = bval or lval == 'warn'
82 if bval is None and not (warn or abort or lval == 'ignore'):
82 if bval is None and not (warn or abort or lval == 'ignore'):
83 raise error.ConfigError(
83 raise error.ConfigError(
84 _("ui.portablefilenames value is invalid ('%s')") % val)
84 _("ui.portablefilenames value is invalid ('%s')") % val)
85 return abort, warn
85 return abort, warn
86
86
87 class casecollisionauditor(object):
87 class casecollisionauditor(object):
88 def __init__(self, ui, abort, dirstate):
88 def __init__(self, ui, abort, dirstate):
89 self._ui = ui
89 self._ui = ui
90 self._abort = abort
90 self._abort = abort
91 allfiles = '\0'.join(dirstate._map)
91 allfiles = '\0'.join(dirstate._map)
92 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
92 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
93 self._dirstate = dirstate
93 self._dirstate = dirstate
94 # The purpose of _newfiles is so that we don't complain about
94 # The purpose of _newfiles is so that we don't complain about
95 # case collisions if someone were to call this object with the
95 # case collisions if someone were to call this object with the
96 # same filename twice.
96 # same filename twice.
97 self._newfiles = set()
97 self._newfiles = set()
98
98
99 def __call__(self, f):
99 def __call__(self, f):
100 fl = encoding.lower(f)
100 fl = encoding.lower(f)
101 if (fl in self._loweredfiles and f not in self._dirstate and
101 if (fl in self._loweredfiles and f not in self._dirstate and
102 f not in self._newfiles):
102 f not in self._newfiles):
103 msg = _('possible case-folding collision for %s') % f
103 msg = _('possible case-folding collision for %s') % f
104 if self._abort:
104 if self._abort:
105 raise util.Abort(msg)
105 raise util.Abort(msg)
106 self._ui.warn(_("warning: %s\n") % msg)
106 self._ui.warn(_("warning: %s\n") % msg)
107 self._loweredfiles.add(fl)
107 self._loweredfiles.add(fl)
108 self._newfiles.add(f)
108 self._newfiles.add(f)
109
109
110 class pathauditor(object):
110 class pathauditor(object):
111 '''ensure that a filesystem path contains no banned components.
111 '''ensure that a filesystem path contains no banned components.
112 the following properties of a path are checked:
112 the following properties of a path are checked:
113
113
114 - ends with a directory separator
114 - ends with a directory separator
115 - under top-level .hg
115 - under top-level .hg
116 - starts at the root of a windows drive
116 - starts at the root of a windows drive
117 - contains ".."
117 - contains ".."
118 - traverses a symlink (e.g. a/symlink_here/b)
118 - traverses a symlink (e.g. a/symlink_here/b)
119 - inside a nested repository (a callback can be used to approve
119 - inside a nested repository (a callback can be used to approve
120 some nested repositories, e.g., subrepositories)
120 some nested repositories, e.g., subrepositories)
121 '''
121 '''
122
122
123 def __init__(self, root, callback=None):
123 def __init__(self, root, callback=None):
124 self.audited = set()
124 self.audited = set()
125 self.auditeddir = set()
125 self.auditeddir = set()
126 self.root = root
126 self.root = root
127 self.callback = callback
127 self.callback = callback
128 if os.path.lexists(root) and not util.checkcase(root):
128 if os.path.lexists(root) and not util.checkcase(root):
129 self.normcase = util.normcase
129 self.normcase = util.normcase
130 else:
130 else:
131 self.normcase = lambda x: x
131 self.normcase = lambda x: x
132
132
133 def __call__(self, path):
133 def __call__(self, path):
134 '''Check the relative path.
134 '''Check the relative path.
135 path may contain a pattern (e.g. foodir/**.txt)'''
135 path may contain a pattern (e.g. foodir/**.txt)'''
136
136
137 path = util.localpath(path)
137 path = util.localpath(path)
138 normpath = self.normcase(path)
138 normpath = self.normcase(path)
139 if normpath in self.audited:
139 if normpath in self.audited:
140 return
140 return
141 # AIX ignores "/" at end of path, others raise EISDIR.
141 # AIX ignores "/" at end of path, others raise EISDIR.
142 if util.endswithsep(path):
142 if util.endswithsep(path):
143 raise util.Abort(_("path ends in directory separator: %s") % path)
143 raise util.Abort(_("path ends in directory separator: %s") % path)
144 parts = util.splitpath(path)
144 parts = util.splitpath(path)
145 if (os.path.splitdrive(path)[0]
145 if (os.path.splitdrive(path)[0]
146 or parts[0].lower() in ('.hg', '.hg.', '')
146 or parts[0].lower() in ('.hg', '.hg.', '')
147 or os.pardir in parts):
147 or os.pardir in parts):
148 raise util.Abort(_("path contains illegal component: %s") % path)
148 raise util.Abort(_("path contains illegal component: %s") % path)
149 if '.hg' in path.lower():
149 if '.hg' in path.lower():
150 lparts = [p.lower() for p in parts]
150 lparts = [p.lower() for p in parts]
151 for p in '.hg', '.hg.':
151 for p in '.hg', '.hg.':
152 if p in lparts[1:]:
152 if p in lparts[1:]:
153 pos = lparts.index(p)
153 pos = lparts.index(p)
154 base = os.path.join(*parts[:pos])
154 base = os.path.join(*parts[:pos])
155 raise util.Abort(_("path '%s' is inside nested repo %r")
155 raise util.Abort(_("path '%s' is inside nested repo %r")
156 % (path, base))
156 % (path, base))
157
157
158 normparts = util.splitpath(normpath)
158 normparts = util.splitpath(normpath)
159 assert len(parts) == len(normparts)
159 assert len(parts) == len(normparts)
160
160
161 parts.pop()
161 parts.pop()
162 normparts.pop()
162 normparts.pop()
163 prefixes = []
163 prefixes = []
164 while parts:
164 while parts:
165 prefix = os.sep.join(parts)
165 prefix = os.sep.join(parts)
166 normprefix = os.sep.join(normparts)
166 normprefix = os.sep.join(normparts)
167 if normprefix in self.auditeddir:
167 if normprefix in self.auditeddir:
168 break
168 break
169 curpath = os.path.join(self.root, prefix)
169 curpath = os.path.join(self.root, prefix)
170 try:
170 try:
171 st = os.lstat(curpath)
171 st = os.lstat(curpath)
172 except OSError, err:
172 except OSError, err:
173 # EINVAL can be raised as invalid path syntax under win32.
173 # EINVAL can be raised as invalid path syntax under win32.
174 # They must be ignored for patterns can be checked too.
174 # They must be ignored for patterns can be checked too.
175 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
175 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
176 raise
176 raise
177 else:
177 else:
178 if stat.S_ISLNK(st.st_mode):
178 if stat.S_ISLNK(st.st_mode):
179 raise util.Abort(
179 raise util.Abort(
180 _('path %r traverses symbolic link %r')
180 _('path %r traverses symbolic link %r')
181 % (path, prefix))
181 % (path, prefix))
182 elif (stat.S_ISDIR(st.st_mode) and
182 elif (stat.S_ISDIR(st.st_mode) and
183 os.path.isdir(os.path.join(curpath, '.hg'))):
183 os.path.isdir(os.path.join(curpath, '.hg'))):
184 if not self.callback or not self.callback(curpath):
184 if not self.callback or not self.callback(curpath):
185 raise util.Abort(_("path '%s' is inside nested "
185 raise util.Abort(_("path '%s' is inside nested "
186 "repo %r")
186 "repo %r")
187 % (path, prefix))
187 % (path, prefix))
188 prefixes.append(normprefix)
188 prefixes.append(normprefix)
189 parts.pop()
189 parts.pop()
190 normparts.pop()
190 normparts.pop()
191
191
192 self.audited.add(normpath)
192 self.audited.add(normpath)
193 # only add prefixes to the cache after checking everything: we don't
193 # only add prefixes to the cache after checking everything: we don't
194 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
194 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
195 self.auditeddir.update(prefixes)
195 self.auditeddir.update(prefixes)
196
196
197 def check(self, path):
197 def check(self, path):
198 try:
198 try:
199 self(path)
199 self(path)
200 return True
200 return True
201 except (OSError, util.Abort):
201 except (OSError, util.Abort):
202 return False
202 return False
203
203
204 class abstractvfs(object):
204 class abstractvfs(object):
205 """Abstract base class; cannot be instantiated"""
205 """Abstract base class; cannot be instantiated"""
206
206
207 def __init__(self, *args, **kwargs):
207 def __init__(self, *args, **kwargs):
208 '''Prevent instantiation; don't call this from subclasses.'''
208 '''Prevent instantiation; don't call this from subclasses.'''
209 raise NotImplementedError('attempted instantiating ' + str(type(self)))
209 raise NotImplementedError('attempted instantiating ' + str(type(self)))
210
210
211 def tryread(self, path):
211 def tryread(self, path):
212 '''gracefully return an empty string for missing files'''
212 '''gracefully return an empty string for missing files'''
213 try:
213 try:
214 return self.read(path)
214 return self.read(path)
215 except IOError, inst:
215 except IOError, inst:
216 if inst.errno != errno.ENOENT:
216 if inst.errno != errno.ENOENT:
217 raise
217 raise
218 return ""
218 return ""
219
219
220 def read(self, path):
220 def read(self, path):
221 fp = self(path, 'rb')
221 fp = self(path, 'rb')
222 try:
222 try:
223 return fp.read()
223 return fp.read()
224 finally:
224 finally:
225 fp.close()
225 fp.close()
226
226
227 def write(self, path, data):
227 def write(self, path, data):
228 fp = self(path, 'wb')
228 fp = self(path, 'wb')
229 try:
229 try:
230 return fp.write(data)
230 return fp.write(data)
231 finally:
231 finally:
232 fp.close()
232 fp.close()
233
233
234 def append(self, path, data):
234 def append(self, path, data):
235 fp = self(path, 'ab')
235 fp = self(path, 'ab')
236 try:
236 try:
237 return fp.write(data)
237 return fp.write(data)
238 finally:
238 finally:
239 fp.close()
239 fp.close()
240
240
241 def exists(self, path=None):
241 def exists(self, path=None):
242 return os.path.exists(self.join(path))
242 return os.path.exists(self.join(path))
243
243
244 def isdir(self, path=None):
244 def isdir(self, path=None):
245 return os.path.isdir(self.join(path))
245 return os.path.isdir(self.join(path))
246
246
247 def islink(self, path=None):
247 def islink(self, path=None):
248 return os.path.islink(self.join(path))
248 return os.path.islink(self.join(path))
249
249
250 def makedir(self, path=None, notindexed=True):
250 def makedir(self, path=None, notindexed=True):
251 return util.makedir(self.join(path), notindexed)
251 return util.makedir(self.join(path), notindexed)
252
252
253 def makedirs(self, path=None, mode=None):
253 def makedirs(self, path=None, mode=None):
254 return util.makedirs(self.join(path), mode)
254 return util.makedirs(self.join(path), mode)
255
255
256 def mkdir(self, path=None):
256 def mkdir(self, path=None):
257 return os.mkdir(self.join(path))
257 return os.mkdir(self.join(path))
258
258
259 def readdir(self, path=None, stat=None, skip=None):
259 def readdir(self, path=None, stat=None, skip=None):
260 return osutil.listdir(self.join(path), stat, skip)
260 return osutil.listdir(self.join(path), stat, skip)
261
261
262 def rename(self, src, dst):
262 def rename(self, src, dst):
263 return util.rename(self.join(src), self.join(dst))
263 return util.rename(self.join(src), self.join(dst))
264
264
265 def readlink(self, path):
265 def readlink(self, path):
266 return os.readlink(self.join(path))
266 return os.readlink(self.join(path))
267
267
268 def setflags(self, path, l, x):
268 def setflags(self, path, l, x):
269 return util.setflags(self.join(path), l, x)
269 return util.setflags(self.join(path), l, x)
270
270
271 def stat(self, path=None):
271 def stat(self, path=None):
272 return os.stat(self.join(path))
272 return os.stat(self.join(path))
273
273
274 class vfs(abstractvfs):
274 class vfs(abstractvfs):
275 '''Operate files relative to a base directory
275 '''Operate files relative to a base directory
276
276
277 This class is used to hide the details of COW semantics and
277 This class is used to hide the details of COW semantics and
278 remote file access from higher level code.
278 remote file access from higher level code.
279 '''
279 '''
280 def __init__(self, base, audit=True, expandpath=False, realpath=False):
280 def __init__(self, base, audit=True, expandpath=False, realpath=False):
281 if expandpath:
281 if expandpath:
282 base = util.expandpath(base)
282 base = util.expandpath(base)
283 if realpath:
283 if realpath:
284 base = os.path.realpath(base)
284 base = os.path.realpath(base)
285 self.base = base
285 self.base = base
286 self._setmustaudit(audit)
286 self._setmustaudit(audit)
287 self.createmode = None
287 self.createmode = None
288 self._trustnlink = None
288 self._trustnlink = None
289
289
290 def _getmustaudit(self):
290 def _getmustaudit(self):
291 return self._audit
291 return self._audit
292
292
293 def _setmustaudit(self, onoff):
293 def _setmustaudit(self, onoff):
294 self._audit = onoff
294 self._audit = onoff
295 if onoff:
295 if onoff:
296 self.audit = pathauditor(self.base)
296 self.audit = pathauditor(self.base)
297 else:
297 else:
298 self.audit = util.always
298 self.audit = util.always
299
299
300 mustaudit = property(_getmustaudit, _setmustaudit)
300 mustaudit = property(_getmustaudit, _setmustaudit)
301
301
302 @util.propertycache
302 @util.propertycache
303 def _cansymlink(self):
303 def _cansymlink(self):
304 return util.checklink(self.base)
304 return util.checklink(self.base)
305
305
306 @util.propertycache
306 @util.propertycache
307 def _chmod(self):
307 def _chmod(self):
308 return util.checkexec(self.base)
308 return util.checkexec(self.base)
309
309
310 def _fixfilemode(self, name):
310 def _fixfilemode(self, name):
311 if self.createmode is None or not self._chmod:
311 if self.createmode is None or not self._chmod:
312 return
312 return
313 os.chmod(name, self.createmode & 0666)
313 os.chmod(name, self.createmode & 0666)
314
314
315 def __call__(self, path, mode="r", text=False, atomictemp=False):
315 def __call__(self, path, mode="r", text=False, atomictemp=False):
316 if self._audit:
316 if self._audit:
317 r = util.checkosfilename(path)
317 r = util.checkosfilename(path)
318 if r:
318 if r:
319 raise util.Abort("%s: %r" % (r, path))
319 raise util.Abort("%s: %r" % (r, path))
320 self.audit(path)
320 self.audit(path)
321 f = self.join(path)
321 f = self.join(path)
322
322
323 if not text and "b" not in mode:
323 if not text and "b" not in mode:
324 mode += "b" # for that other OS
324 mode += "b" # for that other OS
325
325
326 nlink = -1
326 nlink = -1
327 if mode not in ('r', 'rb'):
327 if mode not in ('r', 'rb'):
328 dirname, basename = util.split(f)
328 dirname, basename = util.split(f)
329 # If basename is empty, then the path is malformed because it points
329 # If basename is empty, then the path is malformed because it points
330 # to a directory. Let the posixfile() call below raise IOError.
330 # to a directory. Let the posixfile() call below raise IOError.
331 if basename:
331 if basename:
332 if atomictemp:
332 if atomictemp:
333 util.ensuredirs(dirname, self.createmode)
333 util.ensuredirs(dirname, self.createmode)
334 return util.atomictempfile(f, mode, self.createmode)
334 return util.atomictempfile(f, mode, self.createmode)
335 try:
335 try:
336 if 'w' in mode:
336 if 'w' in mode:
337 util.unlink(f)
337 util.unlink(f)
338 nlink = 0
338 nlink = 0
339 else:
339 else:
340 # nlinks() may behave differently for files on Windows
340 # nlinks() may behave differently for files on Windows
341 # shares if the file is open.
341 # shares if the file is open.
342 fd = util.posixfile(f)
342 fd = util.posixfile(f)
343 nlink = util.nlinks(f)
343 nlink = util.nlinks(f)
344 if nlink < 1:
344 if nlink < 1:
345 nlink = 2 # force mktempcopy (issue1922)
345 nlink = 2 # force mktempcopy (issue1922)
346 fd.close()
346 fd.close()
347 except (OSError, IOError), e:
347 except (OSError, IOError), e:
348 if e.errno != errno.ENOENT:
348 if e.errno != errno.ENOENT:
349 raise
349 raise
350 nlink = 0
350 nlink = 0
351 util.ensuredirs(dirname, self.createmode)
351 util.ensuredirs(dirname, self.createmode)
352 if nlink > 0:
352 if nlink > 0:
353 if self._trustnlink is None:
353 if self._trustnlink is None:
354 self._trustnlink = nlink > 1 or util.checknlink(f)
354 self._trustnlink = nlink > 1 or util.checknlink(f)
355 if nlink > 1 or not self._trustnlink:
355 if nlink > 1 or not self._trustnlink:
356 util.rename(util.mktempcopy(f), f)
356 util.rename(util.mktempcopy(f), f)
357 fp = util.posixfile(f, mode)
357 fp = util.posixfile(f, mode)
358 if nlink == 0:
358 if nlink == 0:
359 self._fixfilemode(f)
359 self._fixfilemode(f)
360 return fp
360 return fp
361
361
362 def symlink(self, src, dst):
362 def symlink(self, src, dst):
363 self.audit(dst)
363 self.audit(dst)
364 linkname = self.join(dst)
364 linkname = self.join(dst)
365 try:
365 try:
366 os.unlink(linkname)
366 os.unlink(linkname)
367 except OSError:
367 except OSError:
368 pass
368 pass
369
369
370 util.ensuredirs(os.path.dirname(linkname), self.createmode)
370 util.ensuredirs(os.path.dirname(linkname), self.createmode)
371
371
372 if self._cansymlink:
372 if self._cansymlink:
373 try:
373 try:
374 os.symlink(src, linkname)
374 os.symlink(src, linkname)
375 except OSError, err:
375 except OSError, err:
376 raise OSError(err.errno, _('could not symlink to %r: %s') %
376 raise OSError(err.errno, _('could not symlink to %r: %s') %
377 (src, err.strerror), linkname)
377 (src, err.strerror), linkname)
378 else:
378 else:
379 self.write(dst, src)
379 self.write(dst, src)
380
380
381 def join(self, path):
381 def join(self, path):
382 if path:
382 if path:
383 return os.path.join(self.base, path)
383 return os.path.join(self.base, path)
384 else:
384 else:
385 return self.base
385 return self.base
386
386
387 opener = vfs
387 opener = vfs
388
388
389 class auditvfs(object):
389 class auditvfs(object):
390 def __init__(self, vfs):
390 def __init__(self, vfs):
391 self.vfs = vfs
391 self.vfs = vfs
392
392
393 def _getmustaudit(self):
393 def _getmustaudit(self):
394 return self.vfs.mustaudit
394 return self.vfs.mustaudit
395
395
396 def _setmustaudit(self, onoff):
396 def _setmustaudit(self, onoff):
397 self.vfs.mustaudit = onoff
397 self.vfs.mustaudit = onoff
398
398
399 mustaudit = property(_getmustaudit, _setmustaudit)
399 mustaudit = property(_getmustaudit, _setmustaudit)
400
400
401 class filtervfs(abstractvfs, auditvfs):
401 class filtervfs(abstractvfs, auditvfs):
402 '''Wrapper vfs for filtering filenames with a function.'''
402 '''Wrapper vfs for filtering filenames with a function.'''
403
403
404 def __init__(self, vfs, filter):
404 def __init__(self, vfs, filter):
405 auditvfs.__init__(self, vfs)
405 auditvfs.__init__(self, vfs)
406 self._filter = filter
406 self._filter = filter
407
407
408 def __call__(self, path, *args, **kwargs):
408 def __call__(self, path, *args, **kwargs):
409 return self.vfs(self._filter(path), *args, **kwargs)
409 return self.vfs(self._filter(path), *args, **kwargs)
410
410
411 def join(self, path):
411 def join(self, path):
412 if path:
412 if path:
413 return self.vfs.join(self._filter(path))
413 return self.vfs.join(self._filter(path))
414 else:
414 else:
415 return self.vfs.join(path)
415 return self.vfs.join(path)
416
416
417 filteropener = filtervfs
417 filteropener = filtervfs
418
418
419 class readonlyvfs(abstractvfs, auditvfs):
419 class readonlyvfs(abstractvfs, auditvfs):
420 '''Wrapper vfs preventing any writing.'''
420 '''Wrapper vfs preventing any writing.'''
421
421
422 def __init__(self, vfs):
422 def __init__(self, vfs):
423 auditvfs.__init__(self, vfs)
423 auditvfs.__init__(self, vfs)
424
424
425 def __call__(self, path, mode='r', *args, **kw):
425 def __call__(self, path, mode='r', *args, **kw):
426 if mode not in ('r', 'rb'):
426 if mode not in ('r', 'rb'):
427 raise util.Abort('this vfs is read only')
427 raise util.Abort('this vfs is read only')
428 return self.vfs(path, mode, *args, **kw)
428 return self.vfs(path, mode, *args, **kw)
429
429
430
430
431 def canonpath(root, cwd, myname, auditor=None):
431 def canonpath(root, cwd, myname, auditor=None):
432 '''return the canonical path of myname, given cwd and root'''
432 '''return the canonical path of myname, given cwd and root'''
433 if util.endswithsep(root):
433 if util.endswithsep(root):
434 rootsep = root
434 rootsep = root
435 else:
435 else:
436 rootsep = root + os.sep
436 rootsep = root + os.sep
437 name = myname
437 name = myname
438 if not os.path.isabs(name):
438 if not os.path.isabs(name):
439 name = os.path.join(root, cwd, name)
439 name = os.path.join(root, cwd, name)
440 name = os.path.normpath(name)
440 name = os.path.normpath(name)
441 if auditor is None:
441 if auditor is None:
442 auditor = pathauditor(root)
442 auditor = pathauditor(root)
443 if name != rootsep and name.startswith(rootsep):
443 if name != rootsep and name.startswith(rootsep):
444 name = name[len(rootsep):]
444 name = name[len(rootsep):]
445 auditor(name)
445 auditor(name)
446 return util.pconvert(name)
446 return util.pconvert(name)
447 elif name == root:
447 elif name == root:
448 return ''
448 return ''
449 else:
449 else:
450 # Determine whether `name' is in the hierarchy at or beneath `root',
450 # Determine whether `name' is in the hierarchy at or beneath `root',
451 # by iterating name=dirname(name) until that causes no change (can't
451 # by iterating name=dirname(name) until that causes no change (can't
452 # check name == '/', because that doesn't work on windows). The list
452 # check name == '/', because that doesn't work on windows). The list
453 # `rel' holds the reversed list of components making up the relative
453 # `rel' holds the reversed list of components making up the relative
454 # file name we want.
454 # file name we want.
455 rel = []
455 rel = []
456 while True:
456 while True:
457 try:
457 try:
458 s = util.samefile(name, root)
458 s = util.samefile(name, root)
459 except OSError:
459 except OSError:
460 s = False
460 s = False
461 if s:
461 if s:
462 if not rel:
462 if not rel:
463 # name was actually the same as root (maybe a symlink)
463 # name was actually the same as root (maybe a symlink)
464 return ''
464 return ''
465 rel.reverse()
465 rel.reverse()
466 name = os.path.join(*rel)
466 name = os.path.join(*rel)
467 auditor(name)
467 auditor(name)
468 return util.pconvert(name)
468 return util.pconvert(name)
469 dirname, basename = util.split(name)
469 dirname, basename = util.split(name)
470 rel.append(basename)
470 rel.append(basename)
471 if dirname == name:
471 if dirname == name:
472 break
472 break
473 name = dirname
473 name = dirname
474
474
475 raise util.Abort(_("%s not under root '%s'") % (myname, root))
475 raise util.Abort(_("%s not under root '%s'") % (myname, root))
476
476
477 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
477 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
478 '''yield every hg repository under path, always recursively.
478 '''yield every hg repository under path, always recursively.
479 The recurse flag will only control recursion into repo working dirs'''
479 The recurse flag will only control recursion into repo working dirs'''
480 def errhandler(err):
480 def errhandler(err):
481 if err.filename == path:
481 if err.filename == path:
482 raise err
482 raise err
483 samestat = getattr(os.path, 'samestat', None)
483 samestat = getattr(os.path, 'samestat', None)
484 if followsym and samestat is not None:
484 if followsym and samestat is not None:
485 def adddir(dirlst, dirname):
485 def adddir(dirlst, dirname):
486 match = False
486 match = False
487 dirstat = os.stat(dirname)
487 dirstat = os.stat(dirname)
488 for lstdirstat in dirlst:
488 for lstdirstat in dirlst:
489 if samestat(dirstat, lstdirstat):
489 if samestat(dirstat, lstdirstat):
490 match = True
490 match = True
491 break
491 break
492 if not match:
492 if not match:
493 dirlst.append(dirstat)
493 dirlst.append(dirstat)
494 return not match
494 return not match
495 else:
495 else:
496 followsym = False
496 followsym = False
497
497
498 if (seen_dirs is None) and followsym:
498 if (seen_dirs is None) and followsym:
499 seen_dirs = []
499 seen_dirs = []
500 adddir(seen_dirs, path)
500 adddir(seen_dirs, path)
501 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
501 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
502 dirs.sort()
502 dirs.sort()
503 if '.hg' in dirs:
503 if '.hg' in dirs:
504 yield root # found a repository
504 yield root # found a repository
505 qroot = os.path.join(root, '.hg', 'patches')
505 qroot = os.path.join(root, '.hg', 'patches')
506 if os.path.isdir(os.path.join(qroot, '.hg')):
506 if os.path.isdir(os.path.join(qroot, '.hg')):
507 yield qroot # we have a patch queue repo here
507 yield qroot # we have a patch queue repo here
508 if recurse:
508 if recurse:
509 # avoid recursing inside the .hg directory
509 # avoid recursing inside the .hg directory
510 dirs.remove('.hg')
510 dirs.remove('.hg')
511 else:
511 else:
512 dirs[:] = [] # don't descend further
512 dirs[:] = [] # don't descend further
513 elif followsym:
513 elif followsym:
514 newdirs = []
514 newdirs = []
515 for d in dirs:
515 for d in dirs:
516 fname = os.path.join(root, d)
516 fname = os.path.join(root, d)
517 if adddir(seen_dirs, fname):
517 if adddir(seen_dirs, fname):
518 if os.path.islink(fname):
518 if os.path.islink(fname):
519 for hgname in walkrepos(fname, True, seen_dirs):
519 for hgname in walkrepos(fname, True, seen_dirs):
520 yield hgname
520 yield hgname
521 else:
521 else:
522 newdirs.append(d)
522 newdirs.append(d)
523 dirs[:] = newdirs
523 dirs[:] = newdirs
524
524
525 def osrcpath():
525 def osrcpath():
526 '''return default os-specific hgrc search path'''
526 '''return default os-specific hgrc search path'''
527 path = systemrcpath()
527 path = systemrcpath()
528 path.extend(userrcpath())
528 path.extend(userrcpath())
529 path = [os.path.normpath(f) for f in path]
529 path = [os.path.normpath(f) for f in path]
530 return path
530 return path
531
531
532 _rcpath = None
532 _rcpath = None
533
533
534 def rcpath():
534 def rcpath():
535 '''return hgrc search path. if env var HGRCPATH is set, use it.
535 '''return hgrc search path. if env var HGRCPATH is set, use it.
536 for each item in path, if directory, use files ending in .rc,
536 for each item in path, if directory, use files ending in .rc,
537 else use item.
537 else use item.
538 make HGRCPATH empty to only look in .hg/hgrc of current repo.
538 make HGRCPATH empty to only look in .hg/hgrc of current repo.
539 if no HGRCPATH, use default os-specific path.'''
539 if no HGRCPATH, use default os-specific path.'''
540 global _rcpath
540 global _rcpath
541 if _rcpath is None:
541 if _rcpath is None:
542 if 'HGRCPATH' in os.environ:
542 if 'HGRCPATH' in os.environ:
543 _rcpath = []
543 _rcpath = []
544 for p in os.environ['HGRCPATH'].split(os.pathsep):
544 for p in os.environ['HGRCPATH'].split(os.pathsep):
545 if not p:
545 if not p:
546 continue
546 continue
547 p = util.expandpath(p)
547 p = util.expandpath(p)
548 if os.path.isdir(p):
548 if os.path.isdir(p):
549 for f, kind in osutil.listdir(p):
549 for f, kind in osutil.listdir(p):
550 if f.endswith('.rc'):
550 if f.endswith('.rc'):
551 _rcpath.append(os.path.join(p, f))
551 _rcpath.append(os.path.join(p, f))
552 else:
552 else:
553 _rcpath.append(p)
553 _rcpath.append(p)
554 else:
554 else:
555 _rcpath = osrcpath()
555 _rcpath = osrcpath()
556 return _rcpath
556 return _rcpath
557
557
558 def revsingle(repo, revspec, default='.'):
558 def revsingle(repo, revspec, default='.'):
559 if not revspec and revspec != 0:
559 if not revspec and revspec != 0:
560 return repo[default]
560 return repo[default]
561
561
562 l = revrange(repo, [revspec])
562 l = revrange(repo, [revspec])
563 if len(l) < 1:
563 if len(l) < 1:
564 raise util.Abort(_('empty revision set'))
564 raise util.Abort(_('empty revision set'))
565 return repo[l[-1]]
565 return repo[l[-1]]
566
566
567 def revpair(repo, revs):
567 def revpair(repo, revs):
568 if not revs:
568 if not revs:
569 return repo.dirstate.p1(), None
569 return repo.dirstate.p1(), None
570
570
571 l = revrange(repo, revs)
571 l = revrange(repo, revs)
572
572
573 if len(l) == 0:
573 if len(l) == 0:
574 if revs:
574 if revs:
575 raise util.Abort(_('empty revision range'))
575 raise util.Abort(_('empty revision range'))
576 return repo.dirstate.p1(), None
576 return repo.dirstate.p1(), None
577
577
578 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
578 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
579 return repo.lookup(l[0]), None
579 return repo.lookup(l[0]), None
580
580
581 return repo.lookup(l[0]), repo.lookup(l[-1])
581 return repo.lookup(l[0]), repo.lookup(l[-1])
582
582
583 _revrangesep = ':'
583 _revrangesep = ':'
584
584
585 def revrange(repo, revs):
585 def revrange(repo, revs):
586 """Yield revision as strings from a list of revision specifications."""
586 """Yield revision as strings from a list of revision specifications."""
587
587
588 def revfix(repo, val, defval):
588 def revfix(repo, val, defval):
589 if not val and val != 0 and defval is not None:
589 if not val and val != 0 and defval is not None:
590 return defval
590 return defval
591 return repo[val].rev()
591 return repo[val].rev()
592
592
593 seen, l = set(), []
593 seen, l = set(), []
594 for spec in revs:
594 for spec in revs:
595 if l and not seen:
595 if l and not seen:
596 seen = set(l)
596 seen = set(l)
597 # attempt to parse old-style ranges first to deal with
597 # attempt to parse old-style ranges first to deal with
598 # things like old-tag which contain query metacharacters
598 # things like old-tag which contain query metacharacters
599 try:
599 try:
600 if isinstance(spec, int):
600 if isinstance(spec, int):
601 seen.add(spec)
601 seen.add(spec)
602 l.append(spec)
602 l.append(spec)
603 continue
603 continue
604
604
605 if _revrangesep in spec:
605 if _revrangesep in spec:
606 start, end = spec.split(_revrangesep, 1)
606 start, end = spec.split(_revrangesep, 1)
607 start = revfix(repo, start, 0)
607 start = revfix(repo, start, 0)
608 end = revfix(repo, end, len(repo) - 1)
608 end = revfix(repo, end, len(repo) - 1)
609 if end == nullrev and start <= 0:
609 if end == nullrev and start <= 0:
610 start = nullrev
610 start = nullrev
611 rangeiter = repo.changelog.revs(start, end)
611 rangeiter = repo.changelog.revs(start, end)
612 if not seen and not l:
612 if not seen and not l:
613 # by far the most common case: revs = ["-1:0"]
613 # by far the most common case: revs = ["-1:0"]
614 l = list(rangeiter)
614 l = list(rangeiter)
615 # defer syncing seen until next iteration
615 # defer syncing seen until next iteration
616 continue
616 continue
617 newrevs = set(rangeiter)
617 newrevs = set(rangeiter)
618 if seen:
618 if seen:
619 newrevs.difference_update(seen)
619 newrevs.difference_update(seen)
620 seen.update(newrevs)
620 seen.update(newrevs)
621 else:
621 else:
622 seen = newrevs
622 seen = newrevs
623 l.extend(sorted(newrevs, reverse=start > end))
623 l.extend(sorted(newrevs, reverse=start > end))
624 continue
624 continue
625 elif spec and spec in repo: # single unquoted rev
625 elif spec and spec in repo: # single unquoted rev
626 rev = revfix(repo, spec, None)
626 rev = revfix(repo, spec, None)
627 if rev in seen:
627 if rev in seen:
628 continue
628 continue
629 seen.add(rev)
629 seen.add(rev)
630 l.append(rev)
630 l.append(rev)
631 continue
631 continue
632 except error.RepoLookupError:
632 except error.RepoLookupError:
633 pass
633 pass
634
634
635 # fall through to new-style queries if old-style fails
635 # fall through to new-style queries if old-style fails
636 m = revset.match(repo.ui, spec)
636 m = revset.match(repo.ui, spec)
637 dl = [r for r in m(repo, list(repo)) if r not in seen]
637 dl = [r for r in m(repo, list(repo)) if r not in seen]
638 l.extend(dl)
638 l.extend(dl)
639 seen.update(dl)
639 seen.update(dl)
640
640
641 return l
641 return l
642
642
643 def expandpats(pats):
643 def expandpats(pats):
644 if not util.expandglobs:
644 if not util.expandglobs:
645 return list(pats)
645 return list(pats)
646 ret = []
646 ret = []
647 for p in pats:
647 for p in pats:
648 kind, name = matchmod._patsplit(p, None)
648 kind, name = matchmod._patsplit(p, None)
649 if kind is None:
649 if kind is None:
650 try:
650 try:
651 globbed = glob.glob(name)
651 globbed = glob.glob(name)
652 except re.error:
652 except re.error:
653 globbed = [name]
653 globbed = [name]
654 if globbed:
654 if globbed:
655 ret.extend(globbed)
655 ret.extend(globbed)
656 continue
656 continue
657 ret.append(p)
657 ret.append(p)
658 return ret
658 return ret
659
659
660 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
660 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
661 if pats == ("",):
661 if pats == ("",):
662 pats = []
662 pats = []
663 if not globbed and default == 'relpath':
663 if not globbed and default == 'relpath':
664 pats = expandpats(pats or [])
664 pats = expandpats(pats or [])
665
665
666 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
666 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
667 default)
667 default)
668 def badfn(f, msg):
668 def badfn(f, msg):
669 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
669 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
670 m.bad = badfn
670 m.bad = badfn
671 return m, pats
671 return m, pats
672
672
673 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
673 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
674 return matchandpats(ctx, pats, opts, globbed, default)[0]
674 return matchandpats(ctx, pats, opts, globbed, default)[0]
675
675
676 def matchall(repo):
676 def matchall(repo):
677 return matchmod.always(repo.root, repo.getcwd())
677 return matchmod.always(repo.root, repo.getcwd())
678
678
679 def matchfiles(repo, files):
679 def matchfiles(repo, files):
680 return matchmod.exact(repo.root, repo.getcwd(), files)
680 return matchmod.exact(repo.root, repo.getcwd(), files)
681
681
682 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
682 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
683 if dry_run is None:
683 if dry_run is None:
684 dry_run = opts.get('dry_run')
684 dry_run = opts.get('dry_run')
685 if similarity is None:
685 if similarity is None:
686 similarity = float(opts.get('similarity') or 0)
686 similarity = float(opts.get('similarity') or 0)
687 # we'd use status here, except handling of symlinks and ignore is tricky
687 # we'd use status here, except handling of symlinks and ignore is tricky
688 m = match(repo[None], pats, opts)
688 m = match(repo[None], pats, opts)
689 rejected = []
689 rejected = []
690 m.bad = lambda x, y: rejected.append(x)
690 m.bad = lambda x, y: rejected.append(x)
691
691
692 added, unknown, deleted, removed = _interestingfiles(repo, m)
692 added, unknown, deleted, removed = _interestingfiles(repo, m)
693
693
694 unknownset = set(unknown)
694 unknownset = set(unknown)
695 toprint = unknownset.copy()
695 toprint = unknownset.copy()
696 toprint.update(deleted)
696 toprint.update(deleted)
697 for abs in sorted(toprint):
697 for abs in sorted(toprint):
698 if repo.ui.verbose or not m.exact(abs):
698 if repo.ui.verbose or not m.exact(abs):
699 rel = m.rel(abs)
699 rel = m.rel(abs)
700 if abs in unknownset:
700 if abs in unknownset:
701 status = _('adding %s\n') % ((pats and rel) or abs)
701 status = _('adding %s\n') % ((pats and rel) or abs)
702 else:
702 else:
703 status = _('removing %s\n') % ((pats and rel) or abs)
703 status = _('removing %s\n') % ((pats and rel) or abs)
704 repo.ui.status(status)
704 repo.ui.status(status)
705
705
706 renames = _findrenames(repo, m, added + unknown, removed + deleted,
706 renames = _findrenames(repo, m, added + unknown, removed + deleted,
707 similarity)
707 similarity)
708
708
709 if not dry_run:
709 if not dry_run:
710 _markchanges(repo, unknown, deleted, renames)
710 _markchanges(repo, unknown, deleted, renames)
711
711
712 for f in rejected:
712 for f in rejected:
713 if f in m.files():
713 if f in m.files():
714 return 1
714 return 1
715 return 0
715 return 0
716
716
717 def marktouched(repo, files, similarity=0.0):
717 def marktouched(repo, files, similarity=0.0):
718 '''Assert that files have somehow been operated upon. files are relative to
718 '''Assert that files have somehow been operated upon. files are relative to
719 the repo root.'''
719 the repo root.'''
720 m = matchfiles(repo, files)
720 m = matchfiles(repo, files)
721 rejected = []
721 rejected = []
722 m.bad = lambda x, y: rejected.append(x)
722 m.bad = lambda x, y: rejected.append(x)
723
723
724 added, unknown, deleted, removed = _interestingfiles(repo, m)
724 added, unknown, deleted, removed = _interestingfiles(repo, m)
725
725
726 if repo.ui.verbose:
726 if repo.ui.verbose:
727 unknownset = set(unknown)
727 unknownset = set(unknown)
728 toprint = unknownset.copy()
728 toprint = unknownset.copy()
729 toprint.update(deleted)
729 toprint.update(deleted)
730 for abs in sorted(toprint):
730 for abs in sorted(toprint):
731 if abs in unknownset:
731 if abs in unknownset:
732 status = _('adding %s\n') % abs
732 status = _('adding %s\n') % abs
733 else:
733 else:
734 status = _('removing %s\n') % abs
734 status = _('removing %s\n') % abs
735 repo.ui.status(status)
735 repo.ui.status(status)
736
736
737 renames = _findrenames(repo, m, added + unknown, removed + deleted,
737 renames = _findrenames(repo, m, added + unknown, removed + deleted,
738 similarity)
738 similarity)
739
739
740 _markchanges(repo, unknown, deleted, renames)
740 _markchanges(repo, unknown, deleted, renames)
741
741
742 for f in rejected:
742 for f in rejected:
743 if f in m.files():
743 if f in m.files():
744 return 1
744 return 1
745 return 0
745 return 0
746
746
747 def _interestingfiles(repo, matcher):
747 def _interestingfiles(repo, matcher):
748 '''Walk dirstate with matcher, looking for files that addremove would care
748 '''Walk dirstate with matcher, looking for files that addremove would care
749 about.
749 about.
750
750
751 This is different from dirstate.status because it doesn't care about
751 This is different from dirstate.status because it doesn't care about
752 whether files are modified or clean.'''
752 whether files are modified or clean.'''
753 added, unknown, deleted, removed = [], [], [], []
753 added, unknown, deleted, removed = [], [], [], []
754 audit_path = pathauditor(repo.root)
754 audit_path = pathauditor(repo.root)
755
755
756 ctx = repo[None]
756 ctx = repo[None]
757 dirstate = repo.dirstate
757 dirstate = repo.dirstate
758 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False)
758 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
759 full=False)
759 for abs, st in walkresults.iteritems():
760 for abs, st in walkresults.iteritems():
760 dstate = dirstate[abs]
761 dstate = dirstate[abs]
761 if dstate == '?' and audit_path.check(abs):
762 if dstate == '?' and audit_path.check(abs):
762 unknown.append(abs)
763 unknown.append(abs)
763 elif dstate != 'r' and not st:
764 elif dstate != 'r' and not st:
764 deleted.append(abs)
765 deleted.append(abs)
765 # for finding renames
766 # for finding renames
766 elif dstate == 'r':
767 elif dstate == 'r':
767 removed.append(abs)
768 removed.append(abs)
768 elif dstate == 'a':
769 elif dstate == 'a':
769 added.append(abs)
770 added.append(abs)
770
771
771 return added, unknown, deleted, removed
772 return added, unknown, deleted, removed
772
773
773 def _findrenames(repo, matcher, added, removed, similarity):
774 def _findrenames(repo, matcher, added, removed, similarity):
774 '''Find renames from removed files to added ones.'''
775 '''Find renames from removed files to added ones.'''
775 renames = {}
776 renames = {}
776 if similarity > 0:
777 if similarity > 0:
777 for old, new, score in similar.findrenames(repo, added, removed,
778 for old, new, score in similar.findrenames(repo, added, removed,
778 similarity):
779 similarity):
779 if (repo.ui.verbose or not matcher.exact(old)
780 if (repo.ui.verbose or not matcher.exact(old)
780 or not matcher.exact(new)):
781 or not matcher.exact(new)):
781 repo.ui.status(_('recording removal of %s as rename to %s '
782 repo.ui.status(_('recording removal of %s as rename to %s '
782 '(%d%% similar)\n') %
783 '(%d%% similar)\n') %
783 (matcher.rel(old), matcher.rel(new),
784 (matcher.rel(old), matcher.rel(new),
784 score * 100))
785 score * 100))
785 renames[new] = old
786 renames[new] = old
786 return renames
787 return renames
787
788
788 def _markchanges(repo, unknown, deleted, renames):
789 def _markchanges(repo, unknown, deleted, renames):
789 '''Marks the files in unknown as added, the files in deleted as removed,
790 '''Marks the files in unknown as added, the files in deleted as removed,
790 and the files in renames as copied.'''
791 and the files in renames as copied.'''
791 wctx = repo[None]
792 wctx = repo[None]
792 wlock = repo.wlock()
793 wlock = repo.wlock()
793 try:
794 try:
794 wctx.forget(deleted)
795 wctx.forget(deleted)
795 wctx.add(unknown)
796 wctx.add(unknown)
796 for new, old in renames.iteritems():
797 for new, old in renames.iteritems():
797 wctx.copy(old, new)
798 wctx.copy(old, new)
798 finally:
799 finally:
799 wlock.release()
800 wlock.release()
800
801
801 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
802 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
802 """Update the dirstate to reflect the intent of copying src to dst. For
803 """Update the dirstate to reflect the intent of copying src to dst. For
803 different reasons it might not end with dst being marked as copied from src.
804 different reasons it might not end with dst being marked as copied from src.
804 """
805 """
805 origsrc = repo.dirstate.copied(src) or src
806 origsrc = repo.dirstate.copied(src) or src
806 if dst == origsrc: # copying back a copy?
807 if dst == origsrc: # copying back a copy?
807 if repo.dirstate[dst] not in 'mn' and not dryrun:
808 if repo.dirstate[dst] not in 'mn' and not dryrun:
808 repo.dirstate.normallookup(dst)
809 repo.dirstate.normallookup(dst)
809 else:
810 else:
810 if repo.dirstate[origsrc] == 'a' and origsrc == src:
811 if repo.dirstate[origsrc] == 'a' and origsrc == src:
811 if not ui.quiet:
812 if not ui.quiet:
812 ui.warn(_("%s has not been committed yet, so no copy "
813 ui.warn(_("%s has not been committed yet, so no copy "
813 "data will be stored for %s.\n")
814 "data will be stored for %s.\n")
814 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
815 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
815 if repo.dirstate[dst] in '?r' and not dryrun:
816 if repo.dirstate[dst] in '?r' and not dryrun:
816 wctx.add([dst])
817 wctx.add([dst])
817 elif not dryrun:
818 elif not dryrun:
818 wctx.copy(origsrc, dst)
819 wctx.copy(origsrc, dst)
819
820
820 def readrequires(opener, supported):
821 def readrequires(opener, supported):
821 '''Reads and parses .hg/requires and checks if all entries found
822 '''Reads and parses .hg/requires and checks if all entries found
822 are in the list of supported features.'''
823 are in the list of supported features.'''
823 requirements = set(opener.read("requires").splitlines())
824 requirements = set(opener.read("requires").splitlines())
824 missings = []
825 missings = []
825 for r in requirements:
826 for r in requirements:
826 if r not in supported:
827 if r not in supported:
827 if not r or not r[0].isalnum():
828 if not r or not r[0].isalnum():
828 raise error.RequirementError(_(".hg/requires file is corrupt"))
829 raise error.RequirementError(_(".hg/requires file is corrupt"))
829 missings.append(r)
830 missings.append(r)
830 missings.sort()
831 missings.sort()
831 if missings:
832 if missings:
832 raise error.RequirementError(
833 raise error.RequirementError(
833 _("unknown repository format: requires features '%s' (upgrade "
834 _("unknown repository format: requires features '%s' (upgrade "
834 "Mercurial)") % "', '".join(missings))
835 "Mercurial)") % "', '".join(missings))
835 return requirements
836 return requirements
836
837
837 class filecacheentry(object):
838 class filecacheentry(object):
838 def __init__(self, path, stat=True):
839 def __init__(self, path, stat=True):
839 self.path = path
840 self.path = path
840 self.cachestat = None
841 self.cachestat = None
841 self._cacheable = None
842 self._cacheable = None
842
843
843 if stat:
844 if stat:
844 self.cachestat = filecacheentry.stat(self.path)
845 self.cachestat = filecacheentry.stat(self.path)
845
846
846 if self.cachestat:
847 if self.cachestat:
847 self._cacheable = self.cachestat.cacheable()
848 self._cacheable = self.cachestat.cacheable()
848 else:
849 else:
849 # None means we don't know yet
850 # None means we don't know yet
850 self._cacheable = None
851 self._cacheable = None
851
852
852 def refresh(self):
853 def refresh(self):
853 if self.cacheable():
854 if self.cacheable():
854 self.cachestat = filecacheentry.stat(self.path)
855 self.cachestat = filecacheentry.stat(self.path)
855
856
856 def cacheable(self):
857 def cacheable(self):
857 if self._cacheable is not None:
858 if self._cacheable is not None:
858 return self._cacheable
859 return self._cacheable
859
860
860 # we don't know yet, assume it is for now
861 # we don't know yet, assume it is for now
861 return True
862 return True
862
863
863 def changed(self):
864 def changed(self):
864 # no point in going further if we can't cache it
865 # no point in going further if we can't cache it
865 if not self.cacheable():
866 if not self.cacheable():
866 return True
867 return True
867
868
868 newstat = filecacheentry.stat(self.path)
869 newstat = filecacheentry.stat(self.path)
869
870
870 # we may not know if it's cacheable yet, check again now
871 # we may not know if it's cacheable yet, check again now
871 if newstat and self._cacheable is None:
872 if newstat and self._cacheable is None:
872 self._cacheable = newstat.cacheable()
873 self._cacheable = newstat.cacheable()
873
874
874 # check again
875 # check again
875 if not self._cacheable:
876 if not self._cacheable:
876 return True
877 return True
877
878
878 if self.cachestat != newstat:
879 if self.cachestat != newstat:
879 self.cachestat = newstat
880 self.cachestat = newstat
880 return True
881 return True
881 else:
882 else:
882 return False
883 return False
883
884
884 @staticmethod
885 @staticmethod
885 def stat(path):
886 def stat(path):
886 try:
887 try:
887 return util.cachestat(path)
888 return util.cachestat(path)
888 except OSError, e:
889 except OSError, e:
889 if e.errno != errno.ENOENT:
890 if e.errno != errno.ENOENT:
890 raise
891 raise
891
892
892 class filecache(object):
893 class filecache(object):
893 '''A property like decorator that tracks a file under .hg/ for updates.
894 '''A property like decorator that tracks a file under .hg/ for updates.
894
895
895 Records stat info when called in _filecache.
896 Records stat info when called in _filecache.
896
897
897 On subsequent calls, compares old stat info with new info, and recreates
898 On subsequent calls, compares old stat info with new info, and recreates
898 the object when needed, updating the new stat info in _filecache.
899 the object when needed, updating the new stat info in _filecache.
899
900
900 Mercurial either atomic renames or appends for files under .hg,
901 Mercurial either atomic renames or appends for files under .hg,
901 so to ensure the cache is reliable we need the filesystem to be able
902 so to ensure the cache is reliable we need the filesystem to be able
902 to tell us if a file has been replaced. If it can't, we fallback to
903 to tell us if a file has been replaced. If it can't, we fallback to
903 recreating the object on every call (essentially the same behaviour as
904 recreating the object on every call (essentially the same behaviour as
904 propertycache).'''
905 propertycache).'''
905 def __init__(self, path):
906 def __init__(self, path):
906 self.path = path
907 self.path = path
907
908
908 def join(self, obj, fname):
909 def join(self, obj, fname):
909 """Used to compute the runtime path of the cached file.
910 """Used to compute the runtime path of the cached file.
910
911
911 Users should subclass filecache and provide their own version of this
912 Users should subclass filecache and provide their own version of this
912 function to call the appropriate join function on 'obj' (an instance
913 function to call the appropriate join function on 'obj' (an instance
913 of the class that its member function was decorated).
914 of the class that its member function was decorated).
914 """
915 """
915 return obj.join(fname)
916 return obj.join(fname)
916
917
917 def __call__(self, func):
918 def __call__(self, func):
918 self.func = func
919 self.func = func
919 self.name = func.__name__
920 self.name = func.__name__
920 return self
921 return self
921
922
922 def __get__(self, obj, type=None):
923 def __get__(self, obj, type=None):
923 # do we need to check if the file changed?
924 # do we need to check if the file changed?
924 if self.name in obj.__dict__:
925 if self.name in obj.__dict__:
925 assert self.name in obj._filecache, self.name
926 assert self.name in obj._filecache, self.name
926 return obj.__dict__[self.name]
927 return obj.__dict__[self.name]
927
928
928 entry = obj._filecache.get(self.name)
929 entry = obj._filecache.get(self.name)
929
930
930 if entry:
931 if entry:
931 if entry.changed():
932 if entry.changed():
932 entry.obj = self.func(obj)
933 entry.obj = self.func(obj)
933 else:
934 else:
934 path = self.join(obj, self.path)
935 path = self.join(obj, self.path)
935
936
936 # We stat -before- creating the object so our cache doesn't lie if
937 # We stat -before- creating the object so our cache doesn't lie if
937 # a writer modified between the time we read and stat
938 # a writer modified between the time we read and stat
938 entry = filecacheentry(path)
939 entry = filecacheentry(path)
939 entry.obj = self.func(obj)
940 entry.obj = self.func(obj)
940
941
941 obj._filecache[self.name] = entry
942 obj._filecache[self.name] = entry
942
943
943 obj.__dict__[self.name] = entry.obj
944 obj.__dict__[self.name] = entry.obj
944 return entry.obj
945 return entry.obj
945
946
946 def __set__(self, obj, value):
947 def __set__(self, obj, value):
947 if self.name not in obj._filecache:
948 if self.name not in obj._filecache:
948 # we add an entry for the missing value because X in __dict__
949 # we add an entry for the missing value because X in __dict__
949 # implies X in _filecache
950 # implies X in _filecache
950 ce = filecacheentry(self.join(obj, self.path), False)
951 ce = filecacheentry(self.join(obj, self.path), False)
951 obj._filecache[self.name] = ce
952 obj._filecache[self.name] = ce
952 else:
953 else:
953 ce = obj._filecache[self.name]
954 ce = obj._filecache[self.name]
954
955
955 ce.obj = value # update cached copy
956 ce.obj = value # update cached copy
956 obj.__dict__[self.name] = value # update copy returned by obj.x
957 obj.__dict__[self.name] = value # update copy returned by obj.x
957
958
958 def __delete__(self, obj):
959 def __delete__(self, obj):
959 try:
960 try:
960 del obj.__dict__[self.name]
961 del obj.__dict__[self.name]
961 except KeyError:
962 except KeyError:
962 raise AttributeError(self.name)
963 raise AttributeError(self.name)
963
964
964 class dirs(object):
965 class dirs(object):
965 '''a multiset of directory names from a dirstate or manifest'''
966 '''a multiset of directory names from a dirstate or manifest'''
966
967
967 def __init__(self, map, skip=None):
968 def __init__(self, map, skip=None):
968 self._dirs = {}
969 self._dirs = {}
969 addpath = self.addpath
970 addpath = self.addpath
970 if util.safehasattr(map, 'iteritems') and skip is not None:
971 if util.safehasattr(map, 'iteritems') and skip is not None:
971 for f, s in map.iteritems():
972 for f, s in map.iteritems():
972 if s[0] != skip:
973 if s[0] != skip:
973 addpath(f)
974 addpath(f)
974 else:
975 else:
975 for f in map:
976 for f in map:
976 addpath(f)
977 addpath(f)
977
978
978 def addpath(self, path):
979 def addpath(self, path):
979 dirs = self._dirs
980 dirs = self._dirs
980 for base in finddirs(path):
981 for base in finddirs(path):
981 if base in dirs:
982 if base in dirs:
982 dirs[base] += 1
983 dirs[base] += 1
983 return
984 return
984 dirs[base] = 1
985 dirs[base] = 1
985
986
986 def delpath(self, path):
987 def delpath(self, path):
987 dirs = self._dirs
988 dirs = self._dirs
988 for base in finddirs(path):
989 for base in finddirs(path):
989 if dirs[base] > 1:
990 if dirs[base] > 1:
990 dirs[base] -= 1
991 dirs[base] -= 1
991 return
992 return
992 del dirs[base]
993 del dirs[base]
993
994
994 def __iter__(self):
995 def __iter__(self):
995 return self._dirs.iterkeys()
996 return self._dirs.iterkeys()
996
997
997 def __contains__(self, d):
998 def __contains__(self, d):
998 return d in self._dirs
999 return d in self._dirs
999
1000
1000 if util.safehasattr(parsers, 'dirs'):
1001 if util.safehasattr(parsers, 'dirs'):
1001 dirs = parsers.dirs
1002 dirs = parsers.dirs
1002
1003
1003 def finddirs(path):
1004 def finddirs(path):
1004 pos = path.rfind('/')
1005 pos = path.rfind('/')
1005 while pos != -1:
1006 while pos != -1:
1006 yield path[:pos]
1007 yield path[:pos]
1007 pos = path.rfind('/', 0, pos)
1008 pos = path.rfind('/', 0, pos)