addremove: add back forgotten files (BC)...
Martin von Zweigbergk
r23259:9f477802 default
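The "(BC)" in the summary marks a behavior change: hg addremove now adds back files that were forgotten with hg forget but still exist in the working directory; previously such files were not re-added. A minimal session showing the new behavior, mirroring the test added at the bottom of this change:

  $ hg forget foo
  $ hg -v addremove
  adding foo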
@@ -1,1028 +1,1030 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
23 class status(tuple):
23 class status(tuple):
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
24 '''Named tuple with a list of files per status. The 'deleted', 'unknown'
25 and 'ignored' properties are only relevant to the working copy.
25 and 'ignored' properties are only relevant to the working copy.
26 '''
26 '''
27
27
28 __slots__ = ()
28 __slots__ = ()
29
29
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
30 def __new__(cls, modified, added, removed, deleted, unknown, ignored,
31 clean):
31 clean):
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
32 return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
33 ignored, clean))
33 ignored, clean))
34
34
35 @property
35 @property
36 def modified(self):
36 def modified(self):
37 '''files that have been modified'''
37 '''files that have been modified'''
38 return self[0]
38 return self[0]
39
39
40 @property
40 @property
41 def added(self):
41 def added(self):
42 '''files that have been added'''
42 '''files that have been added'''
43 return self[1]
43 return self[1]
44
44
45 @property
45 @property
46 def removed(self):
46 def removed(self):
47 '''files that have been removed'''
47 '''files that have been removed'''
48 return self[2]
48 return self[2]
49
49
50 @property
50 @property
51 def deleted(self):
51 def deleted(self):
52 '''files that are in the dirstate, but have been deleted from the
52 '''files that are in the dirstate, but have been deleted from the
53 working copy (aka "missing")
53 working copy (aka "missing")
54 '''
54 '''
55 return self[3]
55 return self[3]
56
56
57 @property
57 @property
58 def unknown(self):
58 def unknown(self):
59 '''files not in the dirstate that are not ignored'''
59 '''files not in the dirstate that are not ignored'''
60 return self[4]
60 return self[4]
61
61
62 @property
62 @property
63 def ignored(self):
63 def ignored(self):
64 '''files not in the dirstate that are ignored (by _dirignore())'''
64 '''files not in the dirstate that are ignored (by _dirignore())'''
65 return self[5]
65 return self[5]
66
66
67 @property
67 @property
68 def clean(self):
68 def clean(self):
69 '''files that have not been modified'''
69 '''files that have not been modified'''
70 return self[6]
70 return self[6]
71
71
72 def __repr__(self, *args, **kwargs):
72 def __repr__(self, *args, **kwargs):
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
73 return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
74 'unknown=%r, ignored=%r, clean=%r>') % self)
74 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
76 def itersubrepos(ctx1, ctx2):
76 def itersubrepos(ctx1, ctx2):
77 """find subrepos in ctx1 or ctx2"""
77 """find subrepos in ctx1 or ctx2"""
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
78 # Create a (subpath, ctx) mapping where we prefer subpaths from
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
79 # ctx1. The subpaths from ctx2 are important when the .hgsub file
80 # has been modified (in ctx2) but not yet committed (in ctx1).
80 # has been modified (in ctx2) but not yet committed (in ctx1).
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
81 subpaths = dict.fromkeys(ctx2.substate, ctx2)
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
82 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
83 for subpath, ctx in sorted(subpaths.iteritems()):
83 for subpath, ctx in sorted(subpaths.iteritems()):
84 yield subpath, ctx.sub(subpath)
84 yield subpath, ctx.sub(subpath)
85
85
86 def nochangesfound(ui, repo, excluded=None):
86 def nochangesfound(ui, repo, excluded=None):
87 '''Report no changes for push/pull, excluded is None or a list of
87 '''Report no changes for push/pull, excluded is None or a list of
88 nodes excluded from the push/pull.
88 nodes excluded from the push/pull.
89 '''
89 '''
90 secretlist = []
90 secretlist = []
91 if excluded:
91 if excluded:
92 for n in excluded:
92 for n in excluded:
93 if n not in repo:
93 if n not in repo:
94 # discovery should not have included the filtered revision,
94 # discovery should not have included the filtered revision,
95 # we have to explicitly exclude it until discovery is cleanup.
95 # we have to explicitly exclude it until discovery is cleanup.
96 continue
96 continue
97 ctx = repo[n]
97 ctx = repo[n]
98 if ctx.phase() >= phases.secret and not ctx.extinct():
98 if ctx.phase() >= phases.secret and not ctx.extinct():
99 secretlist.append(n)
99 secretlist.append(n)
100
100
101 if secretlist:
101 if secretlist:
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
102 ui.status(_("no changes found (ignored %d secret changesets)\n")
103 % len(secretlist))
103 % len(secretlist))
104 else:
104 else:
105 ui.status(_("no changes found\n"))
105 ui.status(_("no changes found\n"))
106
106
107 def checknewlabel(repo, lbl, kind):
107 def checknewlabel(repo, lbl, kind):
108 # Do not use the "kind" parameter in ui output.
108 # Do not use the "kind" parameter in ui output.
109 # It makes strings difficult to translate.
109 # It makes strings difficult to translate.
110 if lbl in ['tip', '.', 'null']:
110 if lbl in ['tip', '.', 'null']:
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
111 raise util.Abort(_("the name '%s' is reserved") % lbl)
112 for c in (':', '\0', '\n', '\r'):
112 for c in (':', '\0', '\n', '\r'):
113 if c in lbl:
113 if c in lbl:
114 raise util.Abort(_("%r cannot be used in a name") % c)
114 raise util.Abort(_("%r cannot be used in a name") % c)
115 try:
115 try:
116 int(lbl)
116 int(lbl)
117 raise util.Abort(_("cannot use an integer as a name"))
117 raise util.Abort(_("cannot use an integer as a name"))
118 except ValueError:
118 except ValueError:
119 pass
119 pass
120
120
121 def checkfilename(f):
121 def checkfilename(f):
122 '''Check that the filename f is an acceptable filename for a tracked file'''
122 '''Check that the filename f is an acceptable filename for a tracked file'''
123 if '\r' in f or '\n' in f:
123 if '\r' in f or '\n' in f:
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
124 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
126 def checkportable(ui, f):
126 def checkportable(ui, f):
127 '''Check if filename f is portable and warn or abort depending on config'''
127 '''Check if filename f is portable and warn or abort depending on config'''
128 checkfilename(f)
128 checkfilename(f)
129 abort, warn = checkportabilityalert(ui)
129 abort, warn = checkportabilityalert(ui)
130 if abort or warn:
130 if abort or warn:
131 msg = util.checkwinfilename(f)
131 msg = util.checkwinfilename(f)
132 if msg:
132 if msg:
133 msg = "%s: %r" % (msg, f)
133 msg = "%s: %r" % (msg, f)
134 if abort:
134 if abort:
135 raise util.Abort(msg)
135 raise util.Abort(msg)
136 ui.warn(_("warning: %s\n") % msg)
136 ui.warn(_("warning: %s\n") % msg)
137
137
138 def checkportabilityalert(ui):
138 def checkportabilityalert(ui):
139 '''check if the user's config requests nothing, a warning, or abort for
139 '''check if the user's config requests nothing, a warning, or abort for
140 non-portable filenames'''
140 non-portable filenames'''
141 val = ui.config('ui', 'portablefilenames', 'warn')
141 val = ui.config('ui', 'portablefilenames', 'warn')
142 lval = val.lower()
142 lval = val.lower()
143 bval = util.parsebool(val)
143 bval = util.parsebool(val)
144 abort = os.name == 'nt' or lval == 'abort'
144 abort = os.name == 'nt' or lval == 'abort'
145 warn = bval or lval == 'warn'
145 warn = bval or lval == 'warn'
146 if bval is None and not (warn or abort or lval == 'ignore'):
146 if bval is None and not (warn or abort or lval == 'ignore'):
147 raise error.ConfigError(
147 raise error.ConfigError(
148 _("ui.portablefilenames value is invalid ('%s')") % val)
148 _("ui.portablefilenames value is invalid ('%s')") % val)
149 return abort, warn
149 return abort, warn
150
150
151 class casecollisionauditor(object):
151 class casecollisionauditor(object):
152 def __init__(self, ui, abort, dirstate):
152 def __init__(self, ui, abort, dirstate):
153 self._ui = ui
153 self._ui = ui
154 self._abort = abort
154 self._abort = abort
155 allfiles = '\0'.join(dirstate._map)
155 allfiles = '\0'.join(dirstate._map)
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
156 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
157 self._dirstate = dirstate
157 self._dirstate = dirstate
158 # The purpose of _newfiles is so that we don't complain about
158 # The purpose of _newfiles is so that we don't complain about
159 # case collisions if someone were to call this object with the
159 # case collisions if someone were to call this object with the
160 # same filename twice.
160 # same filename twice.
161 self._newfiles = set()
161 self._newfiles = set()
162
162
163 def __call__(self, f):
163 def __call__(self, f):
164 if f in self._newfiles:
164 if f in self._newfiles:
165 return
165 return
166 fl = encoding.lower(f)
166 fl = encoding.lower(f)
167 if fl in self._loweredfiles and f not in self._dirstate:
167 if fl in self._loweredfiles and f not in self._dirstate:
168 msg = _('possible case-folding collision for %s') % f
168 msg = _('possible case-folding collision for %s') % f
169 if self._abort:
169 if self._abort:
170 raise util.Abort(msg)
170 raise util.Abort(msg)
171 self._ui.warn(_("warning: %s\n") % msg)
171 self._ui.warn(_("warning: %s\n") % msg)
172 self._loweredfiles.add(fl)
172 self._loweredfiles.add(fl)
173 self._newfiles.add(f)
173 self._newfiles.add(f)
174
174
175 class abstractvfs(object):
175 class abstractvfs(object):
176 """Abstract base class; cannot be instantiated"""
176 """Abstract base class; cannot be instantiated"""
177
177
178 def __init__(self, *args, **kwargs):
178 def __init__(self, *args, **kwargs):
179 '''Prevent instantiation; don't call this from subclasses.'''
179 '''Prevent instantiation; don't call this from subclasses.'''
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
181
181
182 def tryread(self, path):
182 def tryread(self, path):
183 '''gracefully return an empty string for missing files'''
183 '''gracefully return an empty string for missing files'''
184 try:
184 try:
185 return self.read(path)
185 return self.read(path)
186 except IOError, inst:
186 except IOError, inst:
187 if inst.errno != errno.ENOENT:
187 if inst.errno != errno.ENOENT:
188 raise
188 raise
189 return ""
189 return ""
190
190
191 def open(self, path, mode="r", text=False, atomictemp=False):
191 def open(self, path, mode="r", text=False, atomictemp=False):
192 self.open = self.__call__
192 self.open = self.__call__
193 return self.__call__(path, mode, text, atomictemp)
193 return self.__call__(path, mode, text, atomictemp)
194
194
195 def read(self, path):
195 def read(self, path):
196 fp = self(path, 'rb')
196 fp = self(path, 'rb')
197 try:
197 try:
198 return fp.read()
198 return fp.read()
199 finally:
199 finally:
200 fp.close()
200 fp.close()
201
201
202 def write(self, path, data):
202 def write(self, path, data):
203 fp = self(path, 'wb')
203 fp = self(path, 'wb')
204 try:
204 try:
205 return fp.write(data)
205 return fp.write(data)
206 finally:
206 finally:
207 fp.close()
207 fp.close()
208
208
209 def append(self, path, data):
209 def append(self, path, data):
210 fp = self(path, 'ab')
210 fp = self(path, 'ab')
211 try:
211 try:
212 return fp.write(data)
212 return fp.write(data)
213 finally:
213 finally:
214 fp.close()
214 fp.close()
215
215
216 def chmod(self, path, mode):
216 def chmod(self, path, mode):
217 return os.chmod(self.join(path), mode)
217 return os.chmod(self.join(path), mode)
218
218
219 def exists(self, path=None):
219 def exists(self, path=None):
220 return os.path.exists(self.join(path))
220 return os.path.exists(self.join(path))
221
221
222 def fstat(self, fp):
222 def fstat(self, fp):
223 return util.fstat(fp)
223 return util.fstat(fp)
224
224
225 def isdir(self, path=None):
225 def isdir(self, path=None):
226 return os.path.isdir(self.join(path))
226 return os.path.isdir(self.join(path))
227
227
228 def isfile(self, path=None):
228 def isfile(self, path=None):
229 return os.path.isfile(self.join(path))
229 return os.path.isfile(self.join(path))
230
230
231 def islink(self, path=None):
231 def islink(self, path=None):
232 return os.path.islink(self.join(path))
232 return os.path.islink(self.join(path))
233
233
234 def lexists(self, path=None):
234 def lexists(self, path=None):
235 return os.path.lexists(self.join(path))
235 return os.path.lexists(self.join(path))
236
236
237 def lstat(self, path=None):
237 def lstat(self, path=None):
238 return os.lstat(self.join(path))
238 return os.lstat(self.join(path))
239
239
240 def listdir(self, path=None):
240 def listdir(self, path=None):
241 return os.listdir(self.join(path))
241 return os.listdir(self.join(path))
242
242
243 def makedir(self, path=None, notindexed=True):
243 def makedir(self, path=None, notindexed=True):
244 return util.makedir(self.join(path), notindexed)
244 return util.makedir(self.join(path), notindexed)
245
245
246 def makedirs(self, path=None, mode=None):
246 def makedirs(self, path=None, mode=None):
247 return util.makedirs(self.join(path), mode)
247 return util.makedirs(self.join(path), mode)
248
248
249 def makelock(self, info, path):
249 def makelock(self, info, path):
250 return util.makelock(info, self.join(path))
250 return util.makelock(info, self.join(path))
251
251
252 def mkdir(self, path=None):
252 def mkdir(self, path=None):
253 return os.mkdir(self.join(path))
253 return os.mkdir(self.join(path))
254
254
255 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
255 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
256 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
256 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
257 dir=self.join(dir), text=text)
257 dir=self.join(dir), text=text)
258 dname, fname = util.split(name)
258 dname, fname = util.split(name)
259 if dir:
259 if dir:
260 return fd, os.path.join(dir, fname)
260 return fd, os.path.join(dir, fname)
261 else:
261 else:
262 return fd, fname
262 return fd, fname
263
263
264 def readdir(self, path=None, stat=None, skip=None):
264 def readdir(self, path=None, stat=None, skip=None):
265 return osutil.listdir(self.join(path), stat, skip)
265 return osutil.listdir(self.join(path), stat, skip)
266
266
267 def readlock(self, path):
267 def readlock(self, path):
268 return util.readlock(self.join(path))
268 return util.readlock(self.join(path))
269
269
270 def rename(self, src, dst):
270 def rename(self, src, dst):
271 return util.rename(self.join(src), self.join(dst))
271 return util.rename(self.join(src), self.join(dst))
272
272
273 def readlink(self, path):
273 def readlink(self, path):
274 return os.readlink(self.join(path))
274 return os.readlink(self.join(path))
275
275
276 def setflags(self, path, l, x):
276 def setflags(self, path, l, x):
277 return util.setflags(self.join(path), l, x)
277 return util.setflags(self.join(path), l, x)
278
278
279 def stat(self, path=None):
279 def stat(self, path=None):
280 return os.stat(self.join(path))
280 return os.stat(self.join(path))
281
281
282 def unlink(self, path=None):
282 def unlink(self, path=None):
283 return util.unlink(self.join(path))
283 return util.unlink(self.join(path))
284
284
285 def unlinkpath(self, path=None, ignoremissing=False):
285 def unlinkpath(self, path=None, ignoremissing=False):
286 return util.unlinkpath(self.join(path), ignoremissing)
286 return util.unlinkpath(self.join(path), ignoremissing)
287
287
288 def utime(self, path=None, t=None):
288 def utime(self, path=None, t=None):
289 return os.utime(self.join(path), t)
289 return os.utime(self.join(path), t)
290
290
291 class vfs(abstractvfs):
291 class vfs(abstractvfs):
292 '''Operate files relative to a base directory
292 '''Operate files relative to a base directory
293
293
294 This class is used to hide the details of COW semantics and
294 This class is used to hide the details of COW semantics and
295 remote file access from higher level code.
295 remote file access from higher level code.
296 '''
296 '''
297 def __init__(self, base, audit=True, expandpath=False, realpath=False):
297 def __init__(self, base, audit=True, expandpath=False, realpath=False):
298 if expandpath:
298 if expandpath:
299 base = util.expandpath(base)
299 base = util.expandpath(base)
300 if realpath:
300 if realpath:
301 base = os.path.realpath(base)
301 base = os.path.realpath(base)
302 self.base = base
302 self.base = base
303 self._setmustaudit(audit)
303 self._setmustaudit(audit)
304 self.createmode = None
304 self.createmode = None
305 self._trustnlink = None
305 self._trustnlink = None
306
306
307 def _getmustaudit(self):
307 def _getmustaudit(self):
308 return self._audit
308 return self._audit
309
309
310 def _setmustaudit(self, onoff):
310 def _setmustaudit(self, onoff):
311 self._audit = onoff
311 self._audit = onoff
312 if onoff:
312 if onoff:
313 self.audit = pathutil.pathauditor(self.base)
313 self.audit = pathutil.pathauditor(self.base)
314 else:
314 else:
315 self.audit = util.always
315 self.audit = util.always
316
316
317 mustaudit = property(_getmustaudit, _setmustaudit)
317 mustaudit = property(_getmustaudit, _setmustaudit)
318
318
319 @util.propertycache
319 @util.propertycache
320 def _cansymlink(self):
320 def _cansymlink(self):
321 return util.checklink(self.base)
321 return util.checklink(self.base)
322
322
323 @util.propertycache
323 @util.propertycache
324 def _chmod(self):
324 def _chmod(self):
325 return util.checkexec(self.base)
325 return util.checkexec(self.base)
326
326
327 def _fixfilemode(self, name):
327 def _fixfilemode(self, name):
328 if self.createmode is None or not self._chmod:
328 if self.createmode is None or not self._chmod:
329 return
329 return
330 os.chmod(name, self.createmode & 0666)
330 os.chmod(name, self.createmode & 0666)
331
331
332 def __call__(self, path, mode="r", text=False, atomictemp=False):
332 def __call__(self, path, mode="r", text=False, atomictemp=False):
333 if self._audit:
333 if self._audit:
334 r = util.checkosfilename(path)
334 r = util.checkosfilename(path)
335 if r:
335 if r:
336 raise util.Abort("%s: %r" % (r, path))
336 raise util.Abort("%s: %r" % (r, path))
337 self.audit(path)
337 self.audit(path)
338 f = self.join(path)
338 f = self.join(path)
339
339
340 if not text and "b" not in mode:
340 if not text and "b" not in mode:
341 mode += "b" # for that other OS
341 mode += "b" # for that other OS
342
342
343 nlink = -1
343 nlink = -1
344 if mode not in ('r', 'rb'):
344 if mode not in ('r', 'rb'):
345 dirname, basename = util.split(f)
345 dirname, basename = util.split(f)
346 # If basename is empty, then the path is malformed because it points
346 # If basename is empty, then the path is malformed because it points
347 # to a directory. Let the posixfile() call below raise IOError.
347 # to a directory. Let the posixfile() call below raise IOError.
348 if basename:
348 if basename:
349 if atomictemp:
349 if atomictemp:
350 util.ensuredirs(dirname, self.createmode)
350 util.ensuredirs(dirname, self.createmode)
351 return util.atomictempfile(f, mode, self.createmode)
351 return util.atomictempfile(f, mode, self.createmode)
352 try:
352 try:
353 if 'w' in mode:
353 if 'w' in mode:
354 util.unlink(f)
354 util.unlink(f)
355 nlink = 0
355 nlink = 0
356 else:
356 else:
357 # nlinks() may behave differently for files on Windows
357 # nlinks() may behave differently for files on Windows
358 # shares if the file is open.
358 # shares if the file is open.
359 fd = util.posixfile(f)
359 fd = util.posixfile(f)
360 nlink = util.nlinks(f)
360 nlink = util.nlinks(f)
361 if nlink < 1:
361 if nlink < 1:
362 nlink = 2 # force mktempcopy (issue1922)
362 nlink = 2 # force mktempcopy (issue1922)
363 fd.close()
363 fd.close()
364 except (OSError, IOError), e:
364 except (OSError, IOError), e:
365 if e.errno != errno.ENOENT:
365 if e.errno != errno.ENOENT:
366 raise
366 raise
367 nlink = 0
367 nlink = 0
368 util.ensuredirs(dirname, self.createmode)
368 util.ensuredirs(dirname, self.createmode)
369 if nlink > 0:
369 if nlink > 0:
370 if self._trustnlink is None:
370 if self._trustnlink is None:
371 self._trustnlink = nlink > 1 or util.checknlink(f)
371 self._trustnlink = nlink > 1 or util.checknlink(f)
372 if nlink > 1 or not self._trustnlink:
372 if nlink > 1 or not self._trustnlink:
373 util.rename(util.mktempcopy(f), f)
373 util.rename(util.mktempcopy(f), f)
374 fp = util.posixfile(f, mode)
374 fp = util.posixfile(f, mode)
375 if nlink == 0:
375 if nlink == 0:
376 self._fixfilemode(f)
376 self._fixfilemode(f)
377 return fp
377 return fp
378
378
379 def symlink(self, src, dst):
379 def symlink(self, src, dst):
380 self.audit(dst)
380 self.audit(dst)
381 linkname = self.join(dst)
381 linkname = self.join(dst)
382 try:
382 try:
383 os.unlink(linkname)
383 os.unlink(linkname)
384 except OSError:
384 except OSError:
385 pass
385 pass
386
386
387 util.ensuredirs(os.path.dirname(linkname), self.createmode)
387 util.ensuredirs(os.path.dirname(linkname), self.createmode)
388
388
389 if self._cansymlink:
389 if self._cansymlink:
390 try:
390 try:
391 os.symlink(src, linkname)
391 os.symlink(src, linkname)
392 except OSError, err:
392 except OSError, err:
393 raise OSError(err.errno, _('could not symlink to %r: %s') %
393 raise OSError(err.errno, _('could not symlink to %r: %s') %
394 (src, err.strerror), linkname)
394 (src, err.strerror), linkname)
395 else:
395 else:
396 self.write(dst, src)
396 self.write(dst, src)
397
397
398 def join(self, path):
398 def join(self, path):
399 if path:
399 if path:
400 return os.path.join(self.base, path)
400 return os.path.join(self.base, path)
401 else:
401 else:
402 return self.base
402 return self.base
403
403
404 opener = vfs
404 opener = vfs
405
405
406 class auditvfs(object):
406 class auditvfs(object):
407 def __init__(self, vfs):
407 def __init__(self, vfs):
408 self.vfs = vfs
408 self.vfs = vfs
409
409
410 def _getmustaudit(self):
410 def _getmustaudit(self):
411 return self.vfs.mustaudit
411 return self.vfs.mustaudit
412
412
413 def _setmustaudit(self, onoff):
413 def _setmustaudit(self, onoff):
414 self.vfs.mustaudit = onoff
414 self.vfs.mustaudit = onoff
415
415
416 mustaudit = property(_getmustaudit, _setmustaudit)
416 mustaudit = property(_getmustaudit, _setmustaudit)
417
417
418 class filtervfs(abstractvfs, auditvfs):
418 class filtervfs(abstractvfs, auditvfs):
419 '''Wrapper vfs for filtering filenames with a function.'''
419 '''Wrapper vfs for filtering filenames with a function.'''
420
420
421 def __init__(self, vfs, filter):
421 def __init__(self, vfs, filter):
422 auditvfs.__init__(self, vfs)
422 auditvfs.__init__(self, vfs)
423 self._filter = filter
423 self._filter = filter
424
424
425 def __call__(self, path, *args, **kwargs):
425 def __call__(self, path, *args, **kwargs):
426 return self.vfs(self._filter(path), *args, **kwargs)
426 return self.vfs(self._filter(path), *args, **kwargs)
427
427
428 def join(self, path):
428 def join(self, path):
429 if path:
429 if path:
430 return self.vfs.join(self._filter(path))
430 return self.vfs.join(self._filter(path))
431 else:
431 else:
432 return self.vfs.join(path)
432 return self.vfs.join(path)
433
433
434 filteropener = filtervfs
434 filteropener = filtervfs
435
435
436 class readonlyvfs(abstractvfs, auditvfs):
436 class readonlyvfs(abstractvfs, auditvfs):
437 '''Wrapper vfs preventing any writing.'''
437 '''Wrapper vfs preventing any writing.'''
438
438
439 def __init__(self, vfs):
439 def __init__(self, vfs):
440 auditvfs.__init__(self, vfs)
440 auditvfs.__init__(self, vfs)
441
441
442 def __call__(self, path, mode='r', *args, **kw):
442 def __call__(self, path, mode='r', *args, **kw):
443 if mode not in ('r', 'rb'):
443 if mode not in ('r', 'rb'):
444 raise util.Abort('this vfs is read only')
444 raise util.Abort('this vfs is read only')
445 return self.vfs(path, mode, *args, **kw)
445 return self.vfs(path, mode, *args, **kw)
446
446
447
447
448 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
448 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
449 '''yield every hg repository under path, always recursively.
449 '''yield every hg repository under path, always recursively.
450 The recurse flag will only control recursion into repo working dirs'''
450 The recurse flag will only control recursion into repo working dirs'''
451 def errhandler(err):
451 def errhandler(err):
452 if err.filename == path:
452 if err.filename == path:
453 raise err
453 raise err
454 samestat = getattr(os.path, 'samestat', None)
454 samestat = getattr(os.path, 'samestat', None)
455 if followsym and samestat is not None:
455 if followsym and samestat is not None:
456 def adddir(dirlst, dirname):
456 def adddir(dirlst, dirname):
457 match = False
457 match = False
458 dirstat = os.stat(dirname)
458 dirstat = os.stat(dirname)
459 for lstdirstat in dirlst:
459 for lstdirstat in dirlst:
460 if samestat(dirstat, lstdirstat):
460 if samestat(dirstat, lstdirstat):
461 match = True
461 match = True
462 break
462 break
463 if not match:
463 if not match:
464 dirlst.append(dirstat)
464 dirlst.append(dirstat)
465 return not match
465 return not match
466 else:
466 else:
467 followsym = False
467 followsym = False
468
468
469 if (seen_dirs is None) and followsym:
469 if (seen_dirs is None) and followsym:
470 seen_dirs = []
470 seen_dirs = []
471 adddir(seen_dirs, path)
471 adddir(seen_dirs, path)
472 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
472 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
473 dirs.sort()
473 dirs.sort()
474 if '.hg' in dirs:
474 if '.hg' in dirs:
475 yield root # found a repository
475 yield root # found a repository
476 qroot = os.path.join(root, '.hg', 'patches')
476 qroot = os.path.join(root, '.hg', 'patches')
477 if os.path.isdir(os.path.join(qroot, '.hg')):
477 if os.path.isdir(os.path.join(qroot, '.hg')):
478 yield qroot # we have a patch queue repo here
478 yield qroot # we have a patch queue repo here
479 if recurse:
479 if recurse:
480 # avoid recursing inside the .hg directory
480 # avoid recursing inside the .hg directory
481 dirs.remove('.hg')
481 dirs.remove('.hg')
482 else:
482 else:
483 dirs[:] = [] # don't descend further
483 dirs[:] = [] # don't descend further
484 elif followsym:
484 elif followsym:
485 newdirs = []
485 newdirs = []
486 for d in dirs:
486 for d in dirs:
487 fname = os.path.join(root, d)
487 fname = os.path.join(root, d)
488 if adddir(seen_dirs, fname):
488 if adddir(seen_dirs, fname):
489 if os.path.islink(fname):
489 if os.path.islink(fname):
490 for hgname in walkrepos(fname, True, seen_dirs):
490 for hgname in walkrepos(fname, True, seen_dirs):
491 yield hgname
491 yield hgname
492 else:
492 else:
493 newdirs.append(d)
493 newdirs.append(d)
494 dirs[:] = newdirs
494 dirs[:] = newdirs
495
495
496 def osrcpath():
496 def osrcpath():
497 '''return default os-specific hgrc search path'''
497 '''return default os-specific hgrc search path'''
498 path = []
498 path = []
499 defaultpath = os.path.join(util.datapath, 'default.d')
499 defaultpath = os.path.join(util.datapath, 'default.d')
500 if os.path.isdir(defaultpath):
500 if os.path.isdir(defaultpath):
501 for f, kind in osutil.listdir(defaultpath):
501 for f, kind in osutil.listdir(defaultpath):
502 if f.endswith('.rc'):
502 if f.endswith('.rc'):
503 path.append(os.path.join(defaultpath, f))
503 path.append(os.path.join(defaultpath, f))
504 path.extend(systemrcpath())
504 path.extend(systemrcpath())
505 path.extend(userrcpath())
505 path.extend(userrcpath())
506 path = [os.path.normpath(f) for f in path]
506 path = [os.path.normpath(f) for f in path]
507 return path
507 return path
508
508
509 _rcpath = None
509 _rcpath = None
510
510
511 def rcpath():
511 def rcpath():
512 '''return hgrc search path. if env var HGRCPATH is set, use it.
512 '''return hgrc search path. if env var HGRCPATH is set, use it.
513 for each item in path, if directory, use files ending in .rc,
513 for each item in path, if directory, use files ending in .rc,
514 else use item.
514 else use item.
515 make HGRCPATH empty to only look in .hg/hgrc of current repo.
515 make HGRCPATH empty to only look in .hg/hgrc of current repo.
516 if no HGRCPATH, use default os-specific path.'''
516 if no HGRCPATH, use default os-specific path.'''
517 global _rcpath
517 global _rcpath
518 if _rcpath is None:
518 if _rcpath is None:
519 if 'HGRCPATH' in os.environ:
519 if 'HGRCPATH' in os.environ:
520 _rcpath = []
520 _rcpath = []
521 for p in os.environ['HGRCPATH'].split(os.pathsep):
521 for p in os.environ['HGRCPATH'].split(os.pathsep):
522 if not p:
522 if not p:
523 continue
523 continue
524 p = util.expandpath(p)
524 p = util.expandpath(p)
525 if os.path.isdir(p):
525 if os.path.isdir(p):
526 for f, kind in osutil.listdir(p):
526 for f, kind in osutil.listdir(p):
527 if f.endswith('.rc'):
527 if f.endswith('.rc'):
528 _rcpath.append(os.path.join(p, f))
528 _rcpath.append(os.path.join(p, f))
529 else:
529 else:
530 _rcpath.append(p)
530 _rcpath.append(p)
531 else:
531 else:
532 _rcpath = osrcpath()
532 _rcpath = osrcpath()
533 return _rcpath
533 return _rcpath
534
534
535 def revsingle(repo, revspec, default='.'):
535 def revsingle(repo, revspec, default='.'):
536 if not revspec and revspec != 0:
536 if not revspec and revspec != 0:
537 return repo[default]
537 return repo[default]
538
538
539 l = revrange(repo, [revspec])
539 l = revrange(repo, [revspec])
540 if not l:
540 if not l:
541 raise util.Abort(_('empty revision set'))
541 raise util.Abort(_('empty revision set'))
542 return repo[l.last()]
542 return repo[l.last()]
543
543
544 def revpair(repo, revs):
544 def revpair(repo, revs):
545 if not revs:
545 if not revs:
546 return repo.dirstate.p1(), None
546 return repo.dirstate.p1(), None
547
547
548 l = revrange(repo, revs)
548 l = revrange(repo, revs)
549
549
550 if not l:
550 if not l:
551 first = second = None
551 first = second = None
552 elif l.isascending():
552 elif l.isascending():
553 first = l.min()
553 first = l.min()
554 second = l.max()
554 second = l.max()
555 elif l.isdescending():
555 elif l.isdescending():
556 first = l.max()
556 first = l.max()
557 second = l.min()
557 second = l.min()
558 else:
558 else:
559 first = l.first()
559 first = l.first()
560 second = l.last()
560 second = l.last()
561
561
562 if first is None:
562 if first is None:
563 raise util.Abort(_('empty revision range'))
563 raise util.Abort(_('empty revision range'))
564
564
565 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
565 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
566 return repo.lookup(first), None
566 return repo.lookup(first), None
567
567
568 return repo.lookup(first), repo.lookup(second)
568 return repo.lookup(first), repo.lookup(second)
569
569
570 _revrangesep = ':'
570 _revrangesep = ':'
571
571
572 def revrange(repo, revs):
572 def revrange(repo, revs):
573 """Yield revision as strings from a list of revision specifications."""
573 """Yield revision as strings from a list of revision specifications."""
574
574
575 def revfix(repo, val, defval):
575 def revfix(repo, val, defval):
576 if not val and val != 0 and defval is not None:
576 if not val and val != 0 and defval is not None:
577 return defval
577 return defval
578 return repo[val].rev()
578 return repo[val].rev()
579
579
580 seen, l = set(), revset.baseset([])
580 seen, l = set(), revset.baseset([])
581 for spec in revs:
581 for spec in revs:
582 if l and not seen:
582 if l and not seen:
583 seen = set(l)
583 seen = set(l)
584 # attempt to parse old-style ranges first to deal with
584 # attempt to parse old-style ranges first to deal with
585 # things like old-tag which contain query metacharacters
585 # things like old-tag which contain query metacharacters
586 try:
586 try:
587 if isinstance(spec, int):
587 if isinstance(spec, int):
588 seen.add(spec)
588 seen.add(spec)
589 l = l + revset.baseset([spec])
589 l = l + revset.baseset([spec])
590 continue
590 continue
591
591
592 if _revrangesep in spec:
592 if _revrangesep in spec:
593 start, end = spec.split(_revrangesep, 1)
593 start, end = spec.split(_revrangesep, 1)
594 start = revfix(repo, start, 0)
594 start = revfix(repo, start, 0)
595 end = revfix(repo, end, len(repo) - 1)
595 end = revfix(repo, end, len(repo) - 1)
596 if end == nullrev and start < 0:
596 if end == nullrev and start < 0:
597 start = nullrev
597 start = nullrev
598 rangeiter = repo.changelog.revs(start, end)
598 rangeiter = repo.changelog.revs(start, end)
599 if not seen and not l:
599 if not seen and not l:
600 # by far the most common case: revs = ["-1:0"]
600 # by far the most common case: revs = ["-1:0"]
601 l = revset.baseset(rangeiter)
601 l = revset.baseset(rangeiter)
602 # defer syncing seen until next iteration
602 # defer syncing seen until next iteration
603 continue
603 continue
604 newrevs = set(rangeiter)
604 newrevs = set(rangeiter)
605 if seen:
605 if seen:
606 newrevs.difference_update(seen)
606 newrevs.difference_update(seen)
607 seen.update(newrevs)
607 seen.update(newrevs)
608 else:
608 else:
609 seen = newrevs
609 seen = newrevs
610 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
610 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
611 continue
611 continue
612 elif spec and spec in repo: # single unquoted rev
612 elif spec and spec in repo: # single unquoted rev
613 rev = revfix(repo, spec, None)
613 rev = revfix(repo, spec, None)
614 if rev in seen:
614 if rev in seen:
615 continue
615 continue
616 seen.add(rev)
616 seen.add(rev)
617 l = l + revset.baseset([rev])
617 l = l + revset.baseset([rev])
618 continue
618 continue
619 except error.RepoLookupError:
619 except error.RepoLookupError:
620 pass
620 pass
621
621
622 # fall through to new-style queries if old-style fails
622 # fall through to new-style queries if old-style fails
623 m = revset.match(repo.ui, spec, repo)
623 m = revset.match(repo.ui, spec, repo)
624 if seen or l:
624 if seen or l:
625 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
625 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
626 l = l + revset.baseset(dl)
626 l = l + revset.baseset(dl)
627 seen.update(dl)
627 seen.update(dl)
628 else:
628 else:
629 l = m(repo, revset.spanset(repo))
629 l = m(repo, revset.spanset(repo))
630
630
631 return l
631 return l
632
632
633 def expandpats(pats):
633 def expandpats(pats):
634 '''Expand bare globs when running on windows.
634 '''Expand bare globs when running on windows.
635 On posix we assume it already has already been done by sh.'''
635 On posix we assume it already has already been done by sh.'''
636 if not util.expandglobs:
636 if not util.expandglobs:
637 return list(pats)
637 return list(pats)
638 ret = []
638 ret = []
639 for kindpat in pats:
639 for kindpat in pats:
640 kind, pat = matchmod._patsplit(kindpat, None)
640 kind, pat = matchmod._patsplit(kindpat, None)
641 if kind is None:
641 if kind is None:
642 try:
642 try:
643 globbed = glob.glob(pat)
643 globbed = glob.glob(pat)
644 except re.error:
644 except re.error:
645 globbed = [pat]
645 globbed = [pat]
646 if globbed:
646 if globbed:
647 ret.extend(globbed)
647 ret.extend(globbed)
648 continue
648 continue
649 ret.append(kindpat)
649 ret.append(kindpat)
650 return ret
650 return ret
651
651
652 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
652 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
653 '''Return a matcher and the patterns that were used.
653 '''Return a matcher and the patterns that were used.
654 The matcher will warn about bad matches.'''
654 The matcher will warn about bad matches.'''
655 if pats == ("",):
655 if pats == ("",):
656 pats = []
656 pats = []
657 if not globbed and default == 'relpath':
657 if not globbed and default == 'relpath':
658 pats = expandpats(pats or [])
658 pats = expandpats(pats or [])
659
659
660 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
660 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
661 default)
661 default)
662 def badfn(f, msg):
662 def badfn(f, msg):
663 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
663 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
664 m.bad = badfn
664 m.bad = badfn
665 return m, pats
665 return m, pats
666
666
667 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
667 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
668 '''Return a matcher that will warn about bad matches.'''
668 '''Return a matcher that will warn about bad matches.'''
669 return matchandpats(ctx, pats, opts, globbed, default)[0]
669 return matchandpats(ctx, pats, opts, globbed, default)[0]
670
670
671 def matchall(repo):
671 def matchall(repo):
672 '''Return a matcher that will efficiently match everything.'''
672 '''Return a matcher that will efficiently match everything.'''
673 return matchmod.always(repo.root, repo.getcwd())
673 return matchmod.always(repo.root, repo.getcwd())
674
674
675 def matchfiles(repo, files):
675 def matchfiles(repo, files):
676 '''Return a matcher that will efficiently match exactly these files.'''
676 '''Return a matcher that will efficiently match exactly these files.'''
677 return matchmod.exact(repo.root, repo.getcwd(), files)
677 return matchmod.exact(repo.root, repo.getcwd(), files)
678
678
679 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
679 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
680 if dry_run is None:
680 if dry_run is None:
681 dry_run = opts.get('dry_run')
681 dry_run = opts.get('dry_run')
682 if similarity is None:
682 if similarity is None:
683 similarity = float(opts.get('similarity') or 0)
683 similarity = float(opts.get('similarity') or 0)
684 # we'd use status here, except handling of symlinks and ignore is tricky
684 # we'd use status here, except handling of symlinks and ignore is tricky
685 m = match(repo[None], pats, opts)
685 m = match(repo[None], pats, opts)
686 rejected = []
686 rejected = []
687 m.bad = lambda x, y: rejected.append(x)
687 m.bad = lambda x, y: rejected.append(x)
688
688
689 added, unknown, deleted, removed = _interestingfiles(repo, m)
689 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
690
690
691 unknownset = set(unknown)
691 unknownset = set(unknown + forgotten)
692 toprint = unknownset.copy()
692 toprint = unknownset.copy()
693 toprint.update(deleted)
693 toprint.update(deleted)
694 for abs in sorted(toprint):
694 for abs in sorted(toprint):
695 if repo.ui.verbose or not m.exact(abs):
695 if repo.ui.verbose or not m.exact(abs):
696 rel = m.rel(abs)
696 rel = m.rel(abs)
697 if abs in unknownset:
697 if abs in unknownset:
698 status = _('adding %s\n') % ((pats and rel) or abs)
698 status = _('adding %s\n') % ((pats and rel) or abs)
699 else:
699 else:
700 status = _('removing %s\n') % ((pats and rel) or abs)
700 status = _('removing %s\n') % ((pats and rel) or abs)
701 repo.ui.status(status)
701 repo.ui.status(status)
702
702
703 renames = _findrenames(repo, m, added + unknown, removed + deleted,
703 renames = _findrenames(repo, m, added + unknown, removed + deleted,
704 similarity)
704 similarity)
705
705
706 if not dry_run:
706 if not dry_run:
707 _markchanges(repo, unknown, deleted, renames)
707 _markchanges(repo, unknown + forgotten, deleted, renames)
708
708
709 for f in rejected:
709 for f in rejected:
710 if f in m.files():
710 if f in m.files():
711 return 1
711 return 1
712 return 0
712 return 0
713
713
714 def marktouched(repo, files, similarity=0.0):
714 def marktouched(repo, files, similarity=0.0):
715 '''Assert that files have somehow been operated upon. files are relative to
715 '''Assert that files have somehow been operated upon. files are relative to
716 the repo root.'''
716 the repo root.'''
717 m = matchfiles(repo, files)
717 m = matchfiles(repo, files)
718 rejected = []
718 rejected = []
719 m.bad = lambda x, y: rejected.append(x)
719 m.bad = lambda x, y: rejected.append(x)
720
720
721 added, unknown, deleted, removed = _interestingfiles(repo, m)
721 added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
722
722
723 if repo.ui.verbose:
723 if repo.ui.verbose:
724 unknownset = set(unknown)
724 unknownset = set(unknown + forgotten)
725 toprint = unknownset.copy()
725 toprint = unknownset.copy()
726 toprint.update(deleted)
726 toprint.update(deleted)
727 for abs in sorted(toprint):
727 for abs in sorted(toprint):
728 if abs in unknownset:
728 if abs in unknownset:
729 status = _('adding %s\n') % abs
729 status = _('adding %s\n') % abs
730 else:
730 else:
731 status = _('removing %s\n') % abs
731 status = _('removing %s\n') % abs
732 repo.ui.status(status)
732 repo.ui.status(status)
733
733
734 renames = _findrenames(repo, m, added + unknown, removed + deleted,
734 renames = _findrenames(repo, m, added + unknown, removed + deleted,
735 similarity)
735 similarity)
736
736
737 _markchanges(repo, unknown, deleted, renames)
737 _markchanges(repo, unknown + forgotten, deleted, renames)
738
738
739 for f in rejected:
739 for f in rejected:
740 if f in m.files():
740 if f in m.files():
741 return 1
741 return 1
742 return 0
742 return 0
743
743
744 def _interestingfiles(repo, matcher):
744 def _interestingfiles(repo, matcher):
745 '''Walk dirstate with matcher, looking for files that addremove would care
745 '''Walk dirstate with matcher, looking for files that addremove would care
746 about.
746 about.
747
747
748 This is different from dirstate.status because it doesn't care about
748 This is different from dirstate.status because it doesn't care about
749 whether files are modified or clean.'''
749 whether files are modified or clean.'''
750 added, unknown, deleted, removed = [], [], [], []
750 added, unknown, deleted, removed, forgotten = [], [], [], [], []
751 audit_path = pathutil.pathauditor(repo.root)
751 audit_path = pathutil.pathauditor(repo.root)
752
752
753 ctx = repo[None]
753 ctx = repo[None]
754 dirstate = repo.dirstate
754 dirstate = repo.dirstate
755 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
755 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
756 full=False)
756 full=False)
757 for abs, st in walkresults.iteritems():
757 for abs, st in walkresults.iteritems():
758 dstate = dirstate[abs]
758 dstate = dirstate[abs]
759 if dstate == '?' and audit_path.check(abs):
759 if dstate == '?' and audit_path.check(abs):
760 unknown.append(abs)
760 unknown.append(abs)
761 elif dstate != 'r' and not st:
761 elif dstate != 'r' and not st:
762 deleted.append(abs)
762 deleted.append(abs)
763 elif dstate == 'r' and st:
764 forgotten.append(abs)
763 # for finding renames
765 # for finding renames
764 elif dstate == 'r':
766 elif dstate == 'r' and not st:
765 removed.append(abs)
767 removed.append(abs)
766 elif dstate == 'a':
768 elif dstate == 'a':
767 added.append(abs)
769 added.append(abs)
768
770
769 return added, unknown, deleted, removed
771 return added, unknown, deleted, removed, forgotten
770
772
771 def _findrenames(repo, matcher, added, removed, similarity):
773 def _findrenames(repo, matcher, added, removed, similarity):
772 '''Find renames from removed files to added ones.'''
774 '''Find renames from removed files to added ones.'''
773 renames = {}
775 renames = {}
774 if similarity > 0:
776 if similarity > 0:
775 for old, new, score in similar.findrenames(repo, added, removed,
777 for old, new, score in similar.findrenames(repo, added, removed,
776 similarity):
778 similarity):
777 if (repo.ui.verbose or not matcher.exact(old)
779 if (repo.ui.verbose or not matcher.exact(old)
778 or not matcher.exact(new)):
780 or not matcher.exact(new)):
779 repo.ui.status(_('recording removal of %s as rename to %s '
781 repo.ui.status(_('recording removal of %s as rename to %s '
780 '(%d%% similar)\n') %
782 '(%d%% similar)\n') %
781 (matcher.rel(old), matcher.rel(new),
783 (matcher.rel(old), matcher.rel(new),
782 score * 100))
784 score * 100))
783 renames[new] = old
785 renames[new] = old
784 return renames
786 return renames
785
787
786 def _markchanges(repo, unknown, deleted, renames):
788 def _markchanges(repo, unknown, deleted, renames):
787 '''Marks the files in unknown as added, the files in deleted as removed,
789 '''Marks the files in unknown as added, the files in deleted as removed,
788 and the files in renames as copied.'''
790 and the files in renames as copied.'''
789 wctx = repo[None]
791 wctx = repo[None]
790 wlock = repo.wlock()
792 wlock = repo.wlock()
791 try:
793 try:
792 wctx.forget(deleted)
794 wctx.forget(deleted)
793 wctx.add(unknown)
795 wctx.add(unknown)
794 for new, old in renames.iteritems():
796 for new, old in renames.iteritems():
795 wctx.copy(old, new)
797 wctx.copy(old, new)
796 finally:
798 finally:
797 wlock.release()
799 wlock.release()
798
800
799 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
801 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
800 """Update the dirstate to reflect the intent of copying src to dst. For
802 """Update the dirstate to reflect the intent of copying src to dst. For
801 different reasons it might not end with dst being marked as copied from src.
803 different reasons it might not end with dst being marked as copied from src.
802 """
804 """
803 origsrc = repo.dirstate.copied(src) or src
805 origsrc = repo.dirstate.copied(src) or src
804 if dst == origsrc: # copying back a copy?
806 if dst == origsrc: # copying back a copy?
805 if repo.dirstate[dst] not in 'mn' and not dryrun:
807 if repo.dirstate[dst] not in 'mn' and not dryrun:
806 repo.dirstate.normallookup(dst)
808 repo.dirstate.normallookup(dst)
807 else:
809 else:
808 if repo.dirstate[origsrc] == 'a' and origsrc == src:
810 if repo.dirstate[origsrc] == 'a' and origsrc == src:
809 if not ui.quiet:
811 if not ui.quiet:
810 ui.warn(_("%s has not been committed yet, so no copy "
812 ui.warn(_("%s has not been committed yet, so no copy "
811 "data will be stored for %s.\n")
813 "data will be stored for %s.\n")
812 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
814 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
813 if repo.dirstate[dst] in '?r' and not dryrun:
815 if repo.dirstate[dst] in '?r' and not dryrun:
814 wctx.add([dst])
816 wctx.add([dst])
815 elif not dryrun:
817 elif not dryrun:
816 wctx.copy(origsrc, dst)
818 wctx.copy(origsrc, dst)
817
819
818 def readrequires(opener, supported):
820 def readrequires(opener, supported):
819 '''Reads and parses .hg/requires and checks if all entries found
821 '''Reads and parses .hg/requires and checks if all entries found
820 are in the list of supported features.'''
822 are in the list of supported features.'''
821 requirements = set(opener.read("requires").splitlines())
823 requirements = set(opener.read("requires").splitlines())
822 missings = []
824 missings = []
823 for r in requirements:
825 for r in requirements:
824 if r not in supported:
826 if r not in supported:
825 if not r or not r[0].isalnum():
827 if not r or not r[0].isalnum():
826 raise error.RequirementError(_(".hg/requires file is corrupt"))
828 raise error.RequirementError(_(".hg/requires file is corrupt"))
827 missings.append(r)
829 missings.append(r)
828 missings.sort()
830 missings.sort()
829 if missings:
831 if missings:
830 raise error.RequirementError(
832 raise error.RequirementError(
831 _("repository requires features unknown to this Mercurial: %s")
833 _("repository requires features unknown to this Mercurial: %s")
832 % " ".join(missings),
834 % " ".join(missings),
833 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
835 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
834 " for more information"))
836 " for more information"))
835 return requirements
837 return requirements
836
838
837 class filecachesubentry(object):
839 class filecachesubentry(object):
838 def __init__(self, path, stat):
840 def __init__(self, path, stat):
839 self.path = path
841 self.path = path
840 self.cachestat = None
842 self.cachestat = None
841 self._cacheable = None
843 self._cacheable = None
842
844
843 if stat:
845 if stat:
844 self.cachestat = filecachesubentry.stat(self.path)
846 self.cachestat = filecachesubentry.stat(self.path)
845
847
846 if self.cachestat:
848 if self.cachestat:
847 self._cacheable = self.cachestat.cacheable()
849 self._cacheable = self.cachestat.cacheable()
848 else:
850 else:
849 # None means we don't know yet
851 # None means we don't know yet
850 self._cacheable = None
852 self._cacheable = None
851
853
852 def refresh(self):
854 def refresh(self):
853 if self.cacheable():
855 if self.cacheable():
854 self.cachestat = filecachesubentry.stat(self.path)
856 self.cachestat = filecachesubentry.stat(self.path)
855
857
856 def cacheable(self):
858 def cacheable(self):
857 if self._cacheable is not None:
859 if self._cacheable is not None:
858 return self._cacheable
860 return self._cacheable
859
861
860 # we don't know yet, assume it is for now
862 # we don't know yet, assume it is for now
861 return True
863 return True
862
864
863 def changed(self):
865 def changed(self):
864 # no point in going further if we can't cache it
866 # no point in going further if we can't cache it
865 if not self.cacheable():
867 if not self.cacheable():
866 return True
868 return True
867
869
868 newstat = filecachesubentry.stat(self.path)
870 newstat = filecachesubentry.stat(self.path)
869
871
870 # we may not know if it's cacheable yet, check again now
872 # we may not know if it's cacheable yet, check again now
871 if newstat and self._cacheable is None:
873 if newstat and self._cacheable is None:
872 self._cacheable = newstat.cacheable()
874 self._cacheable = newstat.cacheable()
873
875
874 # check again
876 # check again
875 if not self._cacheable:
877 if not self._cacheable:
876 return True
878 return True
877
879
878 if self.cachestat != newstat:
880 if self.cachestat != newstat:
879 self.cachestat = newstat
881 self.cachestat = newstat
880 return True
882 return True
881 else:
883 else:
882 return False
884 return False
883
885
884 @staticmethod
886 @staticmethod
885 def stat(path):
887 def stat(path):
886 try:
888 try:
887 return util.cachestat(path)
889 return util.cachestat(path)
888 except OSError, e:
890 except OSError, e:
889 if e.errno != errno.ENOENT:
891 if e.errno != errno.ENOENT:
890 raise
892 raise
891
893
892 class filecacheentry(object):
894 class filecacheentry(object):
893 def __init__(self, paths, stat=True):
895 def __init__(self, paths, stat=True):
894 self._entries = []
896 self._entries = []
895 for path in paths:
897 for path in paths:
896 self._entries.append(filecachesubentry(path, stat))
898 self._entries.append(filecachesubentry(path, stat))
897
899
898 def changed(self):
900 def changed(self):
899 '''true if any entry has changed'''
901 '''true if any entry has changed'''
900 for entry in self._entries:
902 for entry in self._entries:
901 if entry.changed():
903 if entry.changed():
902 return True
904 return True
903 return False
905 return False
904
906
905 def refresh(self):
907 def refresh(self):
906 for entry in self._entries:
908 for entry in self._entries:
907 entry.refresh()
909 entry.refresh()
908
910
909 class filecache(object):
911 class filecache(object):
910 '''A property like decorator that tracks files under .hg/ for updates.
912 '''A property like decorator that tracks files under .hg/ for updates.
911
913
912 Records stat info when called in _filecache.
914 Records stat info when called in _filecache.
913
915
914 On subsequent calls, compares old stat info with new info, and recreates the
916 On subsequent calls, compares old stat info with new info, and recreates the
915 object when any of the files changes, updating the new stat info in
917 object when any of the files changes, updating the new stat info in
916 _filecache.
918 _filecache.
917
919
918 Mercurial either atomic renames or appends for files under .hg,
920 Mercurial either atomic renames or appends for files under .hg,
919 so to ensure the cache is reliable we need the filesystem to be able
921 so to ensure the cache is reliable we need the filesystem to be able
920 to tell us if a file has been replaced. If it can't, we fallback to
922 to tell us if a file has been replaced. If it can't, we fallback to
921 recreating the object on every call (essentially the same behaviour as
923 recreating the object on every call (essentially the same behaviour as
922 propertycache).
924 propertycache).
923
925
924 '''
926 '''
925 def __init__(self, *paths):
927 def __init__(self, *paths):
926 self.paths = paths
928 self.paths = paths
927
929
928 def join(self, obj, fname):
930 def join(self, obj, fname):
929 """Used to compute the runtime path of a cached file.
931 """Used to compute the runtime path of a cached file.
930
932
931 Users should subclass filecache and provide their own version of this
933 Users should subclass filecache and provide their own version of this
932 function to call the appropriate join function on 'obj' (an instance
934 function to call the appropriate join function on 'obj' (an instance
933 of the class that its member function was decorated).
935 of the class that its member function was decorated).
934 """
936 """
935 return obj.join(fname)
937 return obj.join(fname)
936
938
937 def __call__(self, func):
939 def __call__(self, func):
938 self.func = func
940 self.func = func
939 self.name = func.__name__
941 self.name = func.__name__
940 return self
942 return self
941
943
942 def __get__(self, obj, type=None):
944 def __get__(self, obj, type=None):
943 # do we need to check if the file changed?
945 # do we need to check if the file changed?
944 if self.name in obj.__dict__:
946 if self.name in obj.__dict__:
945 assert self.name in obj._filecache, self.name
947 assert self.name in obj._filecache, self.name
946 return obj.__dict__[self.name]
948 return obj.__dict__[self.name]
947
949
948 entry = obj._filecache.get(self.name)
950 entry = obj._filecache.get(self.name)
949
951
950 if entry:
952 if entry:
951 if entry.changed():
953 if entry.changed():
952 entry.obj = self.func(obj)
954 entry.obj = self.func(obj)
953 else:
955 else:
954 paths = [self.join(obj, path) for path in self.paths]
956 paths = [self.join(obj, path) for path in self.paths]
955
957
956 # We stat -before- creating the object so our cache doesn't lie if
958 # We stat -before- creating the object so our cache doesn't lie if
957 # a writer modified between the time we read and stat
959 # a writer modified between the time we read and stat
958 entry = filecacheentry(paths, True)
960 entry = filecacheentry(paths, True)
959 entry.obj = self.func(obj)
961 entry.obj = self.func(obj)
960
962
961 obj._filecache[self.name] = entry
963 obj._filecache[self.name] = entry
962
964
963 obj.__dict__[self.name] = entry.obj
965 obj.__dict__[self.name] = entry.obj
964 return entry.obj
966 return entry.obj
965
967
966 def __set__(self, obj, value):
968 def __set__(self, obj, value):
967 if self.name not in obj._filecache:
969 if self.name not in obj._filecache:
968 # we add an entry for the missing value because X in __dict__
970 # we add an entry for the missing value because X in __dict__
969 # implies X in _filecache
971 # implies X in _filecache
970 paths = [self.join(obj, path) for path in self.paths]
972 paths = [self.join(obj, path) for path in self.paths]
971 ce = filecacheentry(paths, False)
973 ce = filecacheentry(paths, False)
972 obj._filecache[self.name] = ce
974 obj._filecache[self.name] = ce
973 else:
975 else:
974 ce = obj._filecache[self.name]
976 ce = obj._filecache[self.name]
975
977
976 ce.obj = value # update cached copy
978 ce.obj = value # update cached copy
977 obj.__dict__[self.name] = value # update copy returned by obj.x
979 obj.__dict__[self.name] = value # update copy returned by obj.x
978
980
979 def __delete__(self, obj):
981 def __delete__(self, obj):
980 try:
982 try:
981 del obj.__dict__[self.name]
983 del obj.__dict__[self.name]
982 except KeyError:
984 except KeyError:
983 raise AttributeError(self.name)
985 raise AttributeError(self.name)
984
986
985 class dirs(object):
987 class dirs(object):
986 '''a multiset of directory names from a dirstate or manifest'''
988 '''a multiset of directory names from a dirstate or manifest'''
987
989
988 def __init__(self, map, skip=None):
990 def __init__(self, map, skip=None):
989 self._dirs = {}
991 self._dirs = {}
990 addpath = self.addpath
992 addpath = self.addpath
991 if util.safehasattr(map, 'iteritems') and skip is not None:
993 if util.safehasattr(map, 'iteritems') and skip is not None:
992 for f, s in map.iteritems():
994 for f, s in map.iteritems():
993 if s[0] != skip:
995 if s[0] != skip:
994 addpath(f)
996 addpath(f)
995 else:
997 else:
996 for f in map:
998 for f in map:
997 addpath(f)
999 addpath(f)
998
1000
999 def addpath(self, path):
1001 def addpath(self, path):
1000 dirs = self._dirs
1002 dirs = self._dirs
1001 for base in finddirs(path):
1003 for base in finddirs(path):
1002 if base in dirs:
1004 if base in dirs:
1003 dirs[base] += 1
1005 dirs[base] += 1
1004 return
1006 return
1005 dirs[base] = 1
1007 dirs[base] = 1
1006
1008
1007 def delpath(self, path):
1009 def delpath(self, path):
1008 dirs = self._dirs
1010 dirs = self._dirs
1009 for base in finddirs(path):
1011 for base in finddirs(path):
1010 if dirs[base] > 1:
1012 if dirs[base] > 1:
1011 dirs[base] -= 1
1013 dirs[base] -= 1
1012 return
1014 return
1013 del dirs[base]
1015 del dirs[base]
1014
1016
1015 def __iter__(self):
1017 def __iter__(self):
1016 return self._dirs.iterkeys()
1018 return self._dirs.iterkeys()
1017
1019
1018 def __contains__(self, d):
1020 def __contains__(self, d):
1019 return d in self._dirs
1021 return d in self._dirs
1020
1022
1021 if util.safehasattr(parsers, 'dirs'):
1023 if util.safehasattr(parsers, 'dirs'):
1022 dirs = parsers.dirs
1024 dirs = parsers.dirs
1023
1025
1024 def finddirs(path):
1026 def finddirs(path):
1025 pos = path.rfind('/')
1027 pos = path.rfind('/')
1026 while pos != -1:
1028 while pos != -1:
1027 yield path[:pos]
1029 yield path[:pos]
1028 pos = path.rfind('/', 0, pos)
1030 pos = path.rfind('/', 0, pos)
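The core of the change above is in _interestingfiles(), which now returns a fifth list, forgotten, holding files whose dirstate code is 'r' (removed/forgotten) but which still exist on disk; addremove() and marktouched() then fold forgotten into unknown so those files are added back. A condensed sketch of the new bucketing, written as a standalone toy helper (not part of the Mercurial API; it omits the path-audit check the real code applies to unknown files):

    def classify(dstate, st):
        # dstate: one-character dirstate code for a file ('?', 'a', 'r', ...)
        # st: stat result from dirstate.walk(); falsy if the file is gone
        #     from the working directory
        if dstate == '?':
            return 'unknown'              # untracked and not ignored
        elif dstate != 'r' and not st:
            return 'deleted'              # tracked but missing from disk
        elif dstate == 'r' and st:
            return 'forgotten'            # new: forgotten but still present
        elif dstate == 'r' and not st:
            return 'removed'              # rename-source candidate
        elif dstate == 'a':
            return 'added'

The callers then do, in effect, _markchanges(repo, unknown + forgotten, deleted, renames), which is what makes a forgotten-but-present file show up as "adding" again.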
@@ -1,48 +1,57 @@
1 $ hg init rep
1 $ hg init rep
2 $ cd rep
2 $ cd rep
3 $ mkdir dir
3 $ mkdir dir
4 $ touch foo dir/bar
4 $ touch foo dir/bar
5 $ hg -v addremove
5 $ hg -v addremove
6 adding dir/bar
6 adding dir/bar
7 adding foo
7 adding foo
8 $ hg -v commit -m "add 1"
8 $ hg -v commit -m "add 1"
9 dir/bar
9 dir/bar
10 foo
10 foo
11 committed changeset 0:6f7f953567a2
11 committed changeset 0:6f7f953567a2
12 $ cd dir/
12 $ cd dir/
13 $ touch ../foo_2 bar_2
13 $ touch ../foo_2 bar_2
14 $ hg -v addremove
14 $ hg -v addremove
15 adding dir/bar_2
15 adding dir/bar_2
16 adding foo_2
16 adding foo_2
17 $ hg -v commit -m "add 2"
17 $ hg -v commit -m "add 2"
18 dir/bar_2
18 dir/bar_2
19 foo_2
19 foo_2
20 committed changeset 1:e65414bf35c5
20 committed changeset 1:e65414bf35c5
21 $ cd ../..
21 $ cd ..
22 $ hg forget foo
23 $ hg -v addremove
24 adding foo
25 $ cd ..
22
26
23 $ hg init sim
27 $ hg init sim
24 $ cd sim
28 $ cd sim
25 $ echo a > a
29 $ echo a > a
26 $ echo a >> a
30 $ echo a >> a
27 $ echo a >> a
31 $ echo a >> a
28 $ echo c > c
32 $ echo c > c
29 $ hg commit -Ama
33 $ hg commit -Ama
30 adding a
34 adding a
31 adding c
35 adding c
32 $ mv a b
36 $ mv a b
33 $ rm c
37 $ rm c
34 $ echo d > d
38 $ echo d > d
35 $ hg addremove -n -s 50 # issue 1696
39 $ hg addremove -n -s 50 # issue 1696
36 removing a
40 removing a
37 adding b
41 adding b
38 removing c
42 removing c
39 adding d
43 adding d
40 recording removal of a as rename to b (100% similar)
44 recording removal of a as rename to b (100% similar)
41 $ hg addremove -s 50
45 $ hg addremove -s 50
42 removing a
46 removing a
43 adding b
47 adding b
44 removing c
48 removing c
45 adding d
49 adding d
46 recording removal of a as rename to b (100% similar)
50 recording removal of a as rename to b (100% similar)
47 $ hg commit -mb
51 $ hg commit -mb
52 $ cp b c
53 $ hg forget b
54 $ hg addremove -s 50
55 adding b
56 adding c
48 $ cd ..
57 $ cd ..