##// END OF EJS Templates
addremove: print relative paths when called with -I/-X (BC)...
Martin von Zweigbergk -
r23427:37788841 default
parent child Browse files
Show More
@@ -1,1067 +1,1067 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
# Select the platform-specific scm helper module once at import time.
# (NOTE(review): scmwindows/scmposix are project-local modules; both are
# expected to expose systemrcpath()/userrcpath().)
if os.name == 'nt':
    import scmwindows as scmplatform
else:
    import scmposix as scmplatform

# Re-export the platform hgrc path helpers under stable module-level names.
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    # No per-instance dict: instances are plain 7-tuples with named accessors.
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # The %-format consumes all seven tuple fields in order.
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs in sorted subpath order.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)
85
85
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            if n not in repo:
                # discovery should not have included the filtered revision,
                # we have to explicitly exclude it until discovery is cleanup.
                continue
            ctx = repo[n]
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
106
106
def checknewlabel(repo, lbl, kind):
    '''Abort if ``lbl`` is not usable as a new label name (bookmark,
    branch, ...); returns None when the name is acceptable.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ['tip', '.', 'null']:
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        # An all-digit name would be ambiguous with a revision number.
        int(lbl)
        raise util.Abort(_("cannot use an integer as a name"))
    except ValueError:
        pass
120
120
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\r' in f or '\n' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        # util.checkwinfilename returns a message string when the name
        # would be invalid on Windows, else a false value.
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise util.Abort(msg)
            ui.warn(_("warning: %s\n") % msg)
137
137
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # On Windows non-portable names are always fatal.
    abort = os.name == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
150
150
class casecollisionauditor(object):
    '''Warn or abort when a new file's name case-folds onto an existing
    tracked file (a problem on case-insensitive filesystems).'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case all tracked names in one encoding.lower() call by
        # joining them with NUL (cheaper than lowering one by one).
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
174
174
175 class abstractvfs(object):
175 class abstractvfs(object):
176 """Abstract base class; cannot be instantiated"""
176 """Abstract base class; cannot be instantiated"""
177
177
178 def __init__(self, *args, **kwargs):
178 def __init__(self, *args, **kwargs):
179 '''Prevent instantiation; don't call this from subclasses.'''
179 '''Prevent instantiation; don't call this from subclasses.'''
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
180 raise NotImplementedError('attempted instantiating ' + str(type(self)))
181
181
182 def tryread(self, path):
182 def tryread(self, path):
183 '''gracefully return an empty string for missing files'''
183 '''gracefully return an empty string for missing files'''
184 try:
184 try:
185 return self.read(path)
185 return self.read(path)
186 except IOError, inst:
186 except IOError, inst:
187 if inst.errno != errno.ENOENT:
187 if inst.errno != errno.ENOENT:
188 raise
188 raise
189 return ""
189 return ""
190
190
191 def tryreadlines(self, path, mode='rb'):
191 def tryreadlines(self, path, mode='rb'):
192 '''gracefully return an empty array for missing files'''
192 '''gracefully return an empty array for missing files'''
193 try:
193 try:
194 return self.readlines(path, mode=mode)
194 return self.readlines(path, mode=mode)
195 except IOError, inst:
195 except IOError, inst:
196 if inst.errno != errno.ENOENT:
196 if inst.errno != errno.ENOENT:
197 raise
197 raise
198 return []
198 return []
199
199
200 def open(self, path, mode="r", text=False, atomictemp=False,
200 def open(self, path, mode="r", text=False, atomictemp=False,
201 notindexed=False):
201 notindexed=False):
202 '''Open ``path`` file, which is relative to vfs root.
202 '''Open ``path`` file, which is relative to vfs root.
203
203
204 Newly created directories are marked as "not to be indexed by
204 Newly created directories are marked as "not to be indexed by
205 the content indexing service", if ``notindexed`` is specified
205 the content indexing service", if ``notindexed`` is specified
206 for "write" mode access.
206 for "write" mode access.
207 '''
207 '''
208 self.open = self.__call__
208 self.open = self.__call__
209 return self.__call__(path, mode, text, atomictemp, notindexed)
209 return self.__call__(path, mode, text, atomictemp, notindexed)
210
210
211 def read(self, path):
211 def read(self, path):
212 fp = self(path, 'rb')
212 fp = self(path, 'rb')
213 try:
213 try:
214 return fp.read()
214 return fp.read()
215 finally:
215 finally:
216 fp.close()
216 fp.close()
217
217
218 def readlines(self, path, mode='rb'):
218 def readlines(self, path, mode='rb'):
219 fp = self(path, mode=mode)
219 fp = self(path, mode=mode)
220 try:
220 try:
221 return fp.readlines()
221 return fp.readlines()
222 finally:
222 finally:
223 fp.close()
223 fp.close()
224
224
225 def write(self, path, data):
225 def write(self, path, data):
226 fp = self(path, 'wb')
226 fp = self(path, 'wb')
227 try:
227 try:
228 return fp.write(data)
228 return fp.write(data)
229 finally:
229 finally:
230 fp.close()
230 fp.close()
231
231
232 def writelines(self, path, data, mode='wb', notindexed=False):
232 def writelines(self, path, data, mode='wb', notindexed=False):
233 fp = self(path, mode=mode, notindexed=notindexed)
233 fp = self(path, mode=mode, notindexed=notindexed)
234 try:
234 try:
235 return fp.writelines(data)
235 return fp.writelines(data)
236 finally:
236 finally:
237 fp.close()
237 fp.close()
238
238
239 def append(self, path, data):
239 def append(self, path, data):
240 fp = self(path, 'ab')
240 fp = self(path, 'ab')
241 try:
241 try:
242 return fp.write(data)
242 return fp.write(data)
243 finally:
243 finally:
244 fp.close()
244 fp.close()
245
245
246 def chmod(self, path, mode):
246 def chmod(self, path, mode):
247 return os.chmod(self.join(path), mode)
247 return os.chmod(self.join(path), mode)
248
248
249 def exists(self, path=None):
249 def exists(self, path=None):
250 return os.path.exists(self.join(path))
250 return os.path.exists(self.join(path))
251
251
252 def fstat(self, fp):
252 def fstat(self, fp):
253 return util.fstat(fp)
253 return util.fstat(fp)
254
254
255 def isdir(self, path=None):
255 def isdir(self, path=None):
256 return os.path.isdir(self.join(path))
256 return os.path.isdir(self.join(path))
257
257
258 def isfile(self, path=None):
258 def isfile(self, path=None):
259 return os.path.isfile(self.join(path))
259 return os.path.isfile(self.join(path))
260
260
261 def islink(self, path=None):
261 def islink(self, path=None):
262 return os.path.islink(self.join(path))
262 return os.path.islink(self.join(path))
263
263
264 def lexists(self, path=None):
264 def lexists(self, path=None):
265 return os.path.lexists(self.join(path))
265 return os.path.lexists(self.join(path))
266
266
267 def lstat(self, path=None):
267 def lstat(self, path=None):
268 return os.lstat(self.join(path))
268 return os.lstat(self.join(path))
269
269
270 def listdir(self, path=None):
270 def listdir(self, path=None):
271 return os.listdir(self.join(path))
271 return os.listdir(self.join(path))
272
272
273 def makedir(self, path=None, notindexed=True):
273 def makedir(self, path=None, notindexed=True):
274 return util.makedir(self.join(path), notindexed)
274 return util.makedir(self.join(path), notindexed)
275
275
276 def makedirs(self, path=None, mode=None):
276 def makedirs(self, path=None, mode=None):
277 return util.makedirs(self.join(path), mode)
277 return util.makedirs(self.join(path), mode)
278
278
279 def makelock(self, info, path):
279 def makelock(self, info, path):
280 return util.makelock(info, self.join(path))
280 return util.makelock(info, self.join(path))
281
281
282 def mkdir(self, path=None):
282 def mkdir(self, path=None):
283 return os.mkdir(self.join(path))
283 return os.mkdir(self.join(path))
284
284
285 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
285 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
286 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
286 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
287 dir=self.join(dir), text=text)
287 dir=self.join(dir), text=text)
288 dname, fname = util.split(name)
288 dname, fname = util.split(name)
289 if dir:
289 if dir:
290 return fd, os.path.join(dir, fname)
290 return fd, os.path.join(dir, fname)
291 else:
291 else:
292 return fd, fname
292 return fd, fname
293
293
294 def readdir(self, path=None, stat=None, skip=None):
294 def readdir(self, path=None, stat=None, skip=None):
295 return osutil.listdir(self.join(path), stat, skip)
295 return osutil.listdir(self.join(path), stat, skip)
296
296
297 def readlock(self, path):
297 def readlock(self, path):
298 return util.readlock(self.join(path))
298 return util.readlock(self.join(path))
299
299
300 def rename(self, src, dst):
300 def rename(self, src, dst):
301 return util.rename(self.join(src), self.join(dst))
301 return util.rename(self.join(src), self.join(dst))
302
302
303 def readlink(self, path):
303 def readlink(self, path):
304 return os.readlink(self.join(path))
304 return os.readlink(self.join(path))
305
305
306 def setflags(self, path, l, x):
306 def setflags(self, path, l, x):
307 return util.setflags(self.join(path), l, x)
307 return util.setflags(self.join(path), l, x)
308
308
309 def stat(self, path=None):
309 def stat(self, path=None):
310 return os.stat(self.join(path))
310 return os.stat(self.join(path))
311
311
312 def unlink(self, path=None):
312 def unlink(self, path=None):
313 return util.unlink(self.join(path))
313 return util.unlink(self.join(path))
314
314
315 def unlinkpath(self, path=None, ignoremissing=False):
315 def unlinkpath(self, path=None, ignoremissing=False):
316 return util.unlinkpath(self.join(path), ignoremissing)
316 return util.unlinkpath(self.join(path), ignoremissing)
317
317
318 def utime(self, path=None, t=None):
318 def utime(self, path=None, t=None):
319 return os.utime(self.join(path), t)
319 return os.utime(self.join(path), t)
320
320
321 class vfs(abstractvfs):
321 class vfs(abstractvfs):
322 '''Operate files relative to a base directory
322 '''Operate files relative to a base directory
323
323
324 This class is used to hide the details of COW semantics and
324 This class is used to hide the details of COW semantics and
325 remote file access from higher level code.
325 remote file access from higher level code.
326 '''
326 '''
327 def __init__(self, base, audit=True, expandpath=False, realpath=False):
327 def __init__(self, base, audit=True, expandpath=False, realpath=False):
328 if expandpath:
328 if expandpath:
329 base = util.expandpath(base)
329 base = util.expandpath(base)
330 if realpath:
330 if realpath:
331 base = os.path.realpath(base)
331 base = os.path.realpath(base)
332 self.base = base
332 self.base = base
333 self._setmustaudit(audit)
333 self._setmustaudit(audit)
334 self.createmode = None
334 self.createmode = None
335 self._trustnlink = None
335 self._trustnlink = None
336
336
337 def _getmustaudit(self):
337 def _getmustaudit(self):
338 return self._audit
338 return self._audit
339
339
340 def _setmustaudit(self, onoff):
340 def _setmustaudit(self, onoff):
341 self._audit = onoff
341 self._audit = onoff
342 if onoff:
342 if onoff:
343 self.audit = pathutil.pathauditor(self.base)
343 self.audit = pathutil.pathauditor(self.base)
344 else:
344 else:
345 self.audit = util.always
345 self.audit = util.always
346
346
347 mustaudit = property(_getmustaudit, _setmustaudit)
347 mustaudit = property(_getmustaudit, _setmustaudit)
348
348
349 @util.propertycache
349 @util.propertycache
350 def _cansymlink(self):
350 def _cansymlink(self):
351 return util.checklink(self.base)
351 return util.checklink(self.base)
352
352
353 @util.propertycache
353 @util.propertycache
354 def _chmod(self):
354 def _chmod(self):
355 return util.checkexec(self.base)
355 return util.checkexec(self.base)
356
356
357 def _fixfilemode(self, name):
357 def _fixfilemode(self, name):
358 if self.createmode is None or not self._chmod:
358 if self.createmode is None or not self._chmod:
359 return
359 return
360 os.chmod(name, self.createmode & 0666)
360 os.chmod(name, self.createmode & 0666)
361
361
362 def __call__(self, path, mode="r", text=False, atomictemp=False,
362 def __call__(self, path, mode="r", text=False, atomictemp=False,
363 notindexed=False):
363 notindexed=False):
364 '''Open ``path`` file, which is relative to vfs root.
364 '''Open ``path`` file, which is relative to vfs root.
365
365
366 Newly created directories are marked as "not to be indexed by
366 Newly created directories are marked as "not to be indexed by
367 the content indexing service", if ``notindexed`` is specified
367 the content indexing service", if ``notindexed`` is specified
368 for "write" mode access.
368 for "write" mode access.
369 '''
369 '''
370 if self._audit:
370 if self._audit:
371 r = util.checkosfilename(path)
371 r = util.checkosfilename(path)
372 if r:
372 if r:
373 raise util.Abort("%s: %r" % (r, path))
373 raise util.Abort("%s: %r" % (r, path))
374 self.audit(path)
374 self.audit(path)
375 f = self.join(path)
375 f = self.join(path)
376
376
377 if not text and "b" not in mode:
377 if not text and "b" not in mode:
378 mode += "b" # for that other OS
378 mode += "b" # for that other OS
379
379
380 nlink = -1
380 nlink = -1
381 if mode not in ('r', 'rb'):
381 if mode not in ('r', 'rb'):
382 dirname, basename = util.split(f)
382 dirname, basename = util.split(f)
383 # If basename is empty, then the path is malformed because it points
383 # If basename is empty, then the path is malformed because it points
384 # to a directory. Let the posixfile() call below raise IOError.
384 # to a directory. Let the posixfile() call below raise IOError.
385 if basename:
385 if basename:
386 if atomictemp:
386 if atomictemp:
387 util.ensuredirs(dirname, self.createmode, notindexed)
387 util.ensuredirs(dirname, self.createmode, notindexed)
388 return util.atomictempfile(f, mode, self.createmode)
388 return util.atomictempfile(f, mode, self.createmode)
389 try:
389 try:
390 if 'w' in mode:
390 if 'w' in mode:
391 util.unlink(f)
391 util.unlink(f)
392 nlink = 0
392 nlink = 0
393 else:
393 else:
394 # nlinks() may behave differently for files on Windows
394 # nlinks() may behave differently for files on Windows
395 # shares if the file is open.
395 # shares if the file is open.
396 fd = util.posixfile(f)
396 fd = util.posixfile(f)
397 nlink = util.nlinks(f)
397 nlink = util.nlinks(f)
398 if nlink < 1:
398 if nlink < 1:
399 nlink = 2 # force mktempcopy (issue1922)
399 nlink = 2 # force mktempcopy (issue1922)
400 fd.close()
400 fd.close()
401 except (OSError, IOError), e:
401 except (OSError, IOError), e:
402 if e.errno != errno.ENOENT:
402 if e.errno != errno.ENOENT:
403 raise
403 raise
404 nlink = 0
404 nlink = 0
405 util.ensuredirs(dirname, self.createmode, notindexed)
405 util.ensuredirs(dirname, self.createmode, notindexed)
406 if nlink > 0:
406 if nlink > 0:
407 if self._trustnlink is None:
407 if self._trustnlink is None:
408 self._trustnlink = nlink > 1 or util.checknlink(f)
408 self._trustnlink = nlink > 1 or util.checknlink(f)
409 if nlink > 1 or not self._trustnlink:
409 if nlink > 1 or not self._trustnlink:
410 util.rename(util.mktempcopy(f), f)
410 util.rename(util.mktempcopy(f), f)
411 fp = util.posixfile(f, mode)
411 fp = util.posixfile(f, mode)
412 if nlink == 0:
412 if nlink == 0:
413 self._fixfilemode(f)
413 self._fixfilemode(f)
414 return fp
414 return fp
415
415
416 def symlink(self, src, dst):
416 def symlink(self, src, dst):
417 self.audit(dst)
417 self.audit(dst)
418 linkname = self.join(dst)
418 linkname = self.join(dst)
419 try:
419 try:
420 os.unlink(linkname)
420 os.unlink(linkname)
421 except OSError:
421 except OSError:
422 pass
422 pass
423
423
424 util.ensuredirs(os.path.dirname(linkname), self.createmode)
424 util.ensuredirs(os.path.dirname(linkname), self.createmode)
425
425
426 if self._cansymlink:
426 if self._cansymlink:
427 try:
427 try:
428 os.symlink(src, linkname)
428 os.symlink(src, linkname)
429 except OSError, err:
429 except OSError, err:
430 raise OSError(err.errno, _('could not symlink to %r: %s') %
430 raise OSError(err.errno, _('could not symlink to %r: %s') %
431 (src, err.strerror), linkname)
431 (src, err.strerror), linkname)
432 else:
432 else:
433 self.write(dst, src)
433 self.write(dst, src)
434
434
435 def join(self, path):
435 def join(self, path):
436 if path:
436 if path:
437 return os.path.join(self.base, path)
437 return os.path.join(self.base, path)
438 else:
438 else:
439 return self.base
439 return self.base
440
440
441 opener = vfs
441 opener = vfs
442
442
class auditvfs(object):
    '''Mixin holding a wrapped vfs and forwarding its ``mustaudit`` flag.'''

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
454
454
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # Rewrite the path with the filter before delegating.
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        if path:
            return self.vfs.join(self._filter(path))
        else:
            return self.vfs.join(path)

# Historical alias for filtervfs.
filteropener = filtervfs
472
472
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # Only read modes are allowed through to the wrapped vfs.
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
483
483
484
484
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only propagate errors on the root path itself; ignore errors
        # on entries discovered during the walk.
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            '''Record dirname's stat in dirlst; return True if unseen.'''
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # Without samestat we cannot detect symlink cycles safely.
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # Walk the symlink target separately, sharing
                        # seen_dirs to avoid revisiting directories.
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
532
532
def osrcpath():
    '''return default os-specific hgrc search path

    Order: bundled default.d/*.rc files, then system-wide rc files,
    then per-user rc files.
    '''
    path = []
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        for f, kind in osutil.listdir(defaultpath):
            if f.endswith('.rc'):
                path.append(os.path.join(defaultpath, f))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    path = [os.path.normpath(f) for f in path]
    return path
545
545
# Module-level cache for rcpath(); computed once per process.
_rcpath = None

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = util.expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = osrcpath()
    return _rcpath
571
571
def revsingle(repo, revspec, default='.'):
    '''Return the context of the single revision matched by revspec,
    falling back to 'default' when revspec is empty (but not 0).'''
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise util.Abort(_('empty revision set'))
    return repo[matched.last()]
580
580
def revpair(repo, revs):
    '''Resolve a list of revision specs to a (first, second) node pair.

    second is None when the specs collapse to a single revision.'''
    if not revs:
        # no revisions requested: pair the working dir's first parent
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    first = second = None
    if resolved:
        if resolved.isascending():
            first, second = resolved.min(), resolved.max()
        elif resolved.isdescending():
            first, second = resolved.max(), resolved.min()
        else:
            first, second = resolved.first(), resolved.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a single non-range spec that resolved to one rev yields no second node
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
606
606
# separator used in old-style "start:end" revision ranges
_revrangesep = ':'
608
608
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # empty spec half (e.g. ":5") falls back to the supplied default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    # 'l' accumulates the result set; 'seen' mirrors it for O(1) dedup,
    # but is only materialized lazily (see the "defer syncing" comment)
    seen, l = set(), revset.baseset([])
    for spec in revs:
        if l and not seen:
            # sync 'seen' with results deferred from the previous iteration
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # preserve the direction the range was written in
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo, revset.spanset(repo))

    return l
669
669
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns are passed through untouched
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # a glob matching nothing keeps the original pattern
            ret.append(kindpat)
    return ret
688
688
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    if pats == ("",):
        # a lone empty pattern means "no patterns"
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'), default)

    def badfn(f, msg):
        ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))

    m.bad = badfn
    return m, pats
703
703
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    matcher, _unused = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
707
707
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
711
711
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
715
715
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule new files for addition and missing ones for removal.

    Returns 1 when any file named by the matcher was rejected, else 0.'''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    m = match(repo[None], pats, opts)
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if not repo.ui.verbose and m.exact(abs):
            # exactly-named files are not echoed unless verbose
            continue
        rel = m.rel(abs)
        # show a cwd-relative path when any pattern (incl. -I/-X) was used
        shown = (m.anypats() and rel) or abs
        if abs in unknownset:
            repo.ui.status(_('adding %s\n') % shown)
        else:
            repo.ui.status(_('removing %s\n') % shown)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    mfiles = m.files()
    if any(f in mfiles for f in rejected):
        return 1
    return 0
750
750
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files)
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # echo pending adds/removes, but only in verbose mode
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    mfiles = m.files()
    if any(f in mfiles for f in rejected):
        return 1
    return 0
780
780
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # auditor rejects paths that must not be tracked (e.g. inside .hg);
    # such paths are kept out of 'unknown' below
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    # NOTE(review): the two positional booleans presumably map to
    # unknown=True, ignored=False in dirstate.walk — confirm against dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # one-letter dirstate state: '?' untracked, 'r' removed, 'a' added
        # (other letters mean tracked); 'st' is falsy if the file is gone
        # from disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # note: '?' files failing the audit above also land here when
            # missing on disk — the branch order is deliberate
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
809
809
810 def _findrenames(repo, matcher, added, removed, similarity):
810 def _findrenames(repo, matcher, added, removed, similarity):
811 '''Find renames from removed files to added ones.'''
811 '''Find renames from removed files to added ones.'''
812 renames = {}
812 renames = {}
813 if similarity > 0:
813 if similarity > 0:
814 for old, new, score in similar.findrenames(repo, added, removed,
814 for old, new, score in similar.findrenames(repo, added, removed,
815 similarity):
815 similarity):
816 if (repo.ui.verbose or not matcher.exact(old)
816 if (repo.ui.verbose or not matcher.exact(old)
817 or not matcher.exact(new)):
817 or not matcher.exact(new)):
818 repo.ui.status(_('recording removal of %s as rename to %s '
818 repo.ui.status(_('recording removal of %s as rename to %s '
819 '(%d%% similar)\n') %
819 '(%d%% similar)\n') %
820 (matcher.rel(old), matcher.rel(new),
820 (matcher.rel(old), matcher.rel(new),
821 score * 100))
821 score * 100))
822 renames[new] = old
822 renames[new] = old
823 return renames
823 return renames
824
824
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # take the working-copy lock once for the whole batch of dirstate edits
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps destination -> source
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
837
837
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # NOTE(review): 'mn' presumably means merged/normal states — for any
        # other state, force a fresh status check of dst; confirm in dirstate
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source is only added in the working copy, so there is no
            # committed revision to attach copy metadata to
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            # still make sure dst is tracked ('?' unknown / 'r' removed)
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
856
856
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a blank or non-alphanumeric-leading entry means the file is broken
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
875
875
class filecachesubentry(object):
    '''Cached stat information for a single file watched by filecache.

    Tracks whether the file at 'path' has changed since the stat info was
    last recorded.
    '''
    def __init__(self, path, stat):
        # path: filesystem path of the watched file
        # stat: if true, record the file's current stat info immediately
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        '''Re-record stat info, if the file can be cached at all.'''
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        '''Whether stat info can be trusted for this file.'''
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the file changed since the last recorded stat.'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        '''Return util.cachestat for path, or None if the file is missing.

        Uses "except ... as ..." (valid since Python 2.6) instead of the
        legacy comma form, which is a syntax error on Python 3.
        '''
        try:
            return util.cachestat(path)
        except OSError as e:
            # a vanished file is normal: report "no stat info" as None
            if e.errno != errno.ENOENT:
                raise
930
930
class filecacheentry(object):
    '''A group of filecachesubentry objects tracked as one unit.'''

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        for sub in self._entries:
            sub.refresh()
947
947
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative names of the watched files; turned into real paths
        # through self.join at lookup time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator entry point: remember the wrapped function and the
        # attribute name it will be exposed under
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: a cached value in __dict__ shadows this descriptor;
            # invariant: it must be mirrored in obj._filecache
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # watched file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1023
1023
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        if util.safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: only count entries not in state 'skip'
            for f, s in map.iteritems():
                if s[0] != skip:
                    self.addpath(f)
        else:
            for f in map:
                self.addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # once a base is present, all of its ancestors already are
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # ancestors keep their counts once some base stays referenced
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
1059
1059
# prefer the C implementation of 'dirs' when the parsers extension has one
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
1062
1062
def finddirs(path):
    '''Yield the parent directories of path, from deepest to shallowest.'''
    while '/' in path:
        path = path.rsplit('/', 1)[0]
        yield path
@@ -1,57 +1,72 b''
1 $ hg init rep
1 $ hg init rep
2 $ cd rep
2 $ cd rep
3 $ mkdir dir
3 $ mkdir dir
4 $ touch foo dir/bar
4 $ touch foo dir/bar
5 $ hg -v addremove
5 $ hg -v addremove
6 adding dir/bar
6 adding dir/bar
7 adding foo
7 adding foo
8 $ hg -v commit -m "add 1"
8 $ hg -v commit -m "add 1"
9 dir/bar
9 dir/bar
10 foo
10 foo
11 committed changeset 0:6f7f953567a2
11 committed changeset 0:6f7f953567a2
12 $ cd dir/
12 $ cd dir/
13 $ touch ../foo_2 bar_2
13 $ touch ../foo_2 bar_2
14 $ hg -v addremove
14 $ hg -v addremove
15 adding dir/bar_2
15 adding dir/bar_2
16 adding foo_2
16 adding foo_2
17 $ hg -v commit -m "add 2"
17 $ hg -v commit -m "add 2"
18 dir/bar_2
18 dir/bar_2
19 foo_2
19 foo_2
20 committed changeset 1:e65414bf35c5
20 committed changeset 1:e65414bf35c5
21 $ cd ..
21 $ cd ..
22 $ hg forget foo
22 $ hg forget foo
23 $ hg -v addremove
23 $ hg -v addremove
24 adding foo
24 adding foo
25 $ cd ..
25 $ cd ..
26
26
27 $ hg init subdir
28 $ cd subdir
29 $ mkdir dir
30 $ cd dir
31 $ touch a.py
32 $ hg addremove 'glob:*.py'
33 adding a.py
34 $ hg forget a.py
35 $ hg addremove -I 'glob:*.py'
36 adding a.py
37 $ hg forget a.py
38 $ hg addremove
39 adding dir/a.py
40 $ cd ..
41
27 $ hg init sim
42 $ hg init sim
28 $ cd sim
43 $ cd sim
29 $ echo a > a
44 $ echo a > a
30 $ echo a >> a
45 $ echo a >> a
31 $ echo a >> a
46 $ echo a >> a
32 $ echo c > c
47 $ echo c > c
33 $ hg commit -Ama
48 $ hg commit -Ama
34 adding a
49 adding a
35 adding c
50 adding c
36 $ mv a b
51 $ mv a b
37 $ rm c
52 $ rm c
38 $ echo d > d
53 $ echo d > d
39 $ hg addremove -n -s 50 # issue 1696
54 $ hg addremove -n -s 50 # issue 1696
40 removing a
55 removing a
41 adding b
56 adding b
42 removing c
57 removing c
43 adding d
58 adding d
44 recording removal of a as rename to b (100% similar)
59 recording removal of a as rename to b (100% similar)
45 $ hg addremove -s 50
60 $ hg addremove -s 50
46 removing a
61 removing a
47 adding b
62 adding b
48 removing c
63 removing c
49 adding d
64 adding d
50 recording removal of a as rename to b (100% similar)
65 recording removal of a as rename to b (100% similar)
51 $ hg commit -mb
66 $ hg commit -mb
52 $ cp b c
67 $ cp b c
53 $ hg forget b
68 $ hg forget b
54 $ hg addremove -s 50
69 $ hg addremove -s 50
55 adding b
70 adding b
56 adding c
71 adding c
57 $ cd ..
72 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now