##// END OF EJS Templates
vfs: make it possible to pass multiple path elements to join...
Matt Harbison -
r24628:a0b47885 default
parent child Browse files
Show More
@@ -1,1130 +1,1130 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    """Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    """

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        """files that have been modified"""
        return self[0]

    @property
    def added(self):
        """files that have been added"""
        return self[1]

    @property
    def removed(self):
        """files that have been removed"""
        return self[2]

    @property
    def deleted(self):
        """files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        """
        return self[3]

    @property
    def unknown(self):
        """files not in the dirstate that are not ignored"""
        return self[4]

    @property
    def ignored(self):
        """files not in the dirstate that are ignored (by _dirignore())"""
        return self[5]

    @property
    def clean(self):
        """files that have not been modified"""
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> owning-context mapping, preferring entries from
    # ctx1.  The subpaths from ctx2 matter when the .hgsub file has been
    # modified (in ctx2) but not yet committed (in ctx1).
    owner = {}
    for subpath in ctx2.substate:
        owner[subpath] = ctx2
    for subpath in ctx1.substate:
        owner[subpath] = ctx1
    for subpath in sorted(owner):
        yield subpath, owner[subpath].sub(subpath)
85
85
def nochangesfound(ui, repo, excluded=None):
    """Report no changes for push/pull.

    ``excluded`` is None or a list of nodes excluded from the push/pull.
    """
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        msg = _("no changes found (ignored %d secret changesets)\n")
        ui.status(msg % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
106
106
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not acceptable as a new label name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in ':\0\n\r':
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        return
    raise util.Abort(_("cannot use an integer as a name"))
120
120
def checkfilename(f):
    """Check that the filename f is an acceptable filename for a tracked
    file; abort on embedded newline or carriage-return characters."""
    if '\n' in f or '\r' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
def checkportable(ui, f):
    """Check if filename f is portable and warn or abort depending on
    the ui.portablefilenames configuration."""
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
137
137
def checkportabilityalert(ui):
    """Return an (abort, warn) pair describing what to do about
    non-portable filenames, per the user's ui.portablefilenames config.

    Raises error.ConfigError for an unrecognized configuration value.
    """
    val = ui.config('ui', 'portablefilenames', 'warn')
    lowered = val.lower()
    parsed = util.parsebool(val)
    # Windows always aborts: the filesystem itself cannot hold the names.
    abort = os.name == 'nt' or lowered == 'abort'
    warn = parsed or lowered == 'warn'
    if parsed is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
150
150
class casecollisionauditor(object):
    """Warn or abort when a filename differs only in case from one
    already present in the dirstate."""

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case every tracked name in one encoding.lower() call by
        # joining on NUL (never valid in a filename) and splitting back.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # _newfiles keeps us from complaining about case collisions when
        # this auditor is called twice with the same filename.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
174
174
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # After the first call, rebind 'open' to __call__ so later calls
        # skip this wrapper entirely.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        fh = self(path, 'rb')
        try:
            return fh.read()
        finally:
            fh.close()

    def readlines(self, path, mode='rb'):
        fh = self(path, mode=mode)
        try:
            return fh.readlines()
        finally:
            fh.close()

    def write(self, path, data):
        fh = self(path, 'wb')
        try:
            return fh.write(data)
        finally:
            fh.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        fh = self(path, mode=mode, notindexed=notindexed)
        try:
            return fh.writelines(data)
        finally:
            fh.close()

    def append(self, path, data):
        fh = self(path, 'ab')
        try:
            return fh.write(data)
        finally:
            fh.close()

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        # Report the path the way the caller addressed it: relative to
        # ``dir`` when one was given, bare filename otherwise.
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
333
333
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # base: directory against which all relative paths are resolved
        # audit: when True, paths are vetted by pathutil.pathauditor
        # expandpath: expand user/environment references in ``base``
        # realpath: resolve ``base`` to a canonical path (symlinks followed)
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept every path unconditionally
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # whether the filesystem under ``base`` supports symlinks
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # whether the filesystem under ``base`` honors the exec bit
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply the configured createmode to a newly created file, but
        # only when a mode is configured and chmod is meaningful here
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink tracks the hardlink count of the target: 0 means "file is
        # new or replaced", >1 means a copy must be broken before writing.
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        # truncating write: the old file is discarded anyway
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    # target does not exist yet: create parent directories
                    nlink = 0
                    util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break the hardlink (copy-on-write) before writing
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        # create a symlink at ``dst`` pointing to ``src``; falls back to
        # writing ``src`` as file content when symlinks are unsupported
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path, *insidef):
        # join ``path`` (and any further fragments) to the vfs base; with
        # a falsy path, return the base itself
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base

opener = vfs # 'opener' is the historical alias for this class
455
455
class auditvfs(object):
    """Base for wrapper vfs classes: holds the wrapped ``vfs`` and
    forwards its ``mustaudit`` flag in both directions."""

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, enabled):
        self.vfs.mustaudit = enabled

    mustaudit = property(_getmustaudit, _setmustaudit)
467
467
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # run every accessed path through the filter before delegating
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        # combine the fragments first so the filter sees the full
        # relative path, then resolve against the wrapped vfs
        full = self.vfs.reljoin(path, *insidef)
        return self.vfs.join(self._filter(full))

filteropener = filtervfs
485
485
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through this wrapper
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
496
496
497
497
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    """Yield every hg repository under ``path``, always recursively.

    The ``recurse`` flag only controls recursion into the working
    directories of repositories that have already been found.
    """
    def onwalkerror(err):
        # errors on the root path itself are fatal; others are ignored
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def _firstvisit(statlist, dirname):
            # Return True (and record the stat) the first time ``dirname``
            # is seen; symlink cycles are detected via samestat().
            st = os.stat(dirname)
            if any(samestat(st, seen) for seen in statlist):
                return False
            statlist.append(st)
            return True
    else:
        # without samestat we cannot detect cycles, so disable symlinks
        followsym = False

    if seen_dirs is None and followsym:
        seen_dirs = []
        _firstvisit(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=onwalkerror):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _firstvisit(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
545
545
def osrcpath():
    """Return the default OS-specific hgrc search path."""
    defaultdir = os.path.join(util.datapath, 'default.d')
    paths = []
    if os.path.isdir(defaultdir):
        # bundled default.d/*.rc files come first
        paths.extend(os.path.join(defaultdir, f)
                     for f, kind in osutil.listdir(defaultdir)
                     if f.endswith('.rc'))
    paths.extend(systemrcpath())
    paths.extend(userrcpath())
    return [os.path.normpath(p) for p in paths]
558
558
# memoized result of rcpath(); None until the first call computes it
_rcpath = None
560
560
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # already computed once; the result is cached for the process
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    found = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            # empty entries are skipped, so HGRCPATH="" yields no files
            continue
        entry = util.expandpath(entry)
        if not os.path.isdir(entry):
            found.append(entry)
            continue
        for f, kind in osutil.listdir(entry):
            if f.endswith('.rc'):
                found.append(os.path.join(entry, f))
    _rcpath = found
    return _rcpath
584
584
def intrev(repo, rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None conventionally stands for the working directory; map it to
    # len(repo) so it sorts after every committed revision number
    return len(repo) if rev is None else rev
591
591
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx; fall back to repo[default]
    when the spec is empty (but 0 counts as a real revision)."""
    if revspec or revspec == 0:
        matched = revrange(repo, [revspec])
        if not matched:
            raise util.Abort(_('empty revision set'))
        return repo[matched.last()]
    return repo[default]
600
600
def revpair(repo, revs):
    """Resolve a list of revision specs into a (node, node-or-None) pair."""
    if not revs:
        # no specs: pair the working directory's first parent with nothing
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    first = second = None
    if resolved:
        if resolved.isascending():
            first, second = resolved.min(), resolved.max()
        elif resolved.isdescending():
            first, second = resolved.max(), resolved.min()
        else:
            first, second = resolved.first(), resolved.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a single non-range spec resolving to one revision yields (node, None)
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
626
626
# separator character of old-style "START:END" revision ranges
_revrangesep = ':'
628
628
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications.

    Each entry of 'revs' may be an integer revision, an old-style
    "START:END" range, a single revision identifier, or a revset query;
    results accumulate in input order with duplicates removed.
    """

    def revfix(repo, val, defval):
        # an empty half of a range (e.g. the missing side of ":x") maps
        # to defval when one is supplied
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    # 'seen' mirrors the contents of 'l' for fast duplicate checks; it is
    # populated lazily (see the top of the loop) to keep the common
    # single-spec case cheap
    seen, l = set(), revset.baseset([])

    revsetaliases = [alias for (alias, _) in
                     repo.ui.configitems("revsetalias")]

    for spec in revs:
        if l and not seen:
            # a second spec is being processed: sync 'seen' with 'l' now
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            # ... except for revset aliases without arguments. These
            # should be parsed as soon as possible, because they might
            # clash with a hash prefix.
            if spec in revsetaliases:
                raise error.RepoLookupError

            if isinstance(spec, int):
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                if start in revsetaliases or end in revsetaliases:
                    raise error.RepoLookupError

                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # preserve the direction the user asked for
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            dl = [r for r in m(repo) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo)

    return l
702
702
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # an explicit kind prefix disables glob expansion
            expanded.append(kindpat)
            continue
        try:
            matches = glob.glob(pat)
        except re.error:
            matches = [pat]
        if matches:
            expanded.extend(matches)
        else:
            # a glob matching nothing is passed through untouched
            expanded.append(kindpat)
    return expanded
721
721
def matchandpats(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.

    The matcher will warn about bad matches. 'pats' is a list of match
    patterns (None/empty means match-all per 'default'); 'opts' may carry
    'include' and 'exclude' pattern lists.
    '''
    # mutable containers ([] / {}) as default argument values are shared
    # across calls; use None sentinels instead
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default)
    def badfn(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))
    m.bad = badfn
    if m.always():
        # an always-matcher means the patterns were effectively unused
        pats = []
    return m, pats
738
738
def match(ctx, pats=None, opts=None, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    # normalize the former mutable defaults ([] / {}) here so the
    # delegation below behaves exactly as before
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    return matchandpats(ctx, pats, opts, globbed, default)[0]
742
742
def matchall(repo):
    '''Return a matcher that efficiently matches every file.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
746
746
def matchfiles(repo, files):
    '''Return a matcher that efficiently matches exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files)
750
750
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing files for removal.

    Recurses into subrepos when requested (or matched), records renames
    between removed and added files when 'similarity' is positive, and
    returns 1 if any explicitly listed file was rejected or a subrepo
    addremove failed, 0 otherwise.  With 'dry_run' the dirstate is left
    untouched.
    '''
    m = matcher
    # a mutable {} default argument would be shared between calls;
    # normalize a None sentinel instead
    if opts is None:
        opts = {}
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is relevant if it is named exactly or any requested
        # file lies underneath it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    origbad = m.bad
    def badfn(f, msg):
        # only warn about files the user listed explicitly, but remember
        # every rejection so the return code can reflect it
        if f in m.files():
            origbad(f, msg)
        rejected.append(f)

    m.bad = badfn
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    m.bad = origbad

    # report what will be added (unknown/forgotten) or removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
813
813
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files)
    rejected = []
    def badfn(f, msg):
        rejected.append(f)
    m.bad = badfn

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        addedset = set(unknown + forgotten)
        for abs in sorted(addedset.union(deleted)):
            if abs in addedset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
843
843
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths
    relative to the repo root.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    # 'st' is the stat result (falsy when the file is gone from disk) and
    # dstate the single-letter dirstate status for the path
    for abs, st in walkresults.iteritems():
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and passes the path audit: candidate for adding
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but missing on disk: candidate for removing
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed yet present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
872
872
873 def _findrenames(repo, matcher, added, removed, similarity):
873 def _findrenames(repo, matcher, added, removed, similarity):
874 '''Find renames from removed files to added ones.'''
874 '''Find renames from removed files to added ones.'''
875 renames = {}
875 renames = {}
876 if similarity > 0:
876 if similarity > 0:
877 for old, new, score in similar.findrenames(repo, added, removed,
877 for old, new, score in similar.findrenames(repo, added, removed,
878 similarity):
878 similarity):
879 if (repo.ui.verbose or not matcher.exact(old)
879 if (repo.ui.verbose or not matcher.exact(old)
880 or not matcher.exact(new)):
880 or not matcher.exact(new)):
881 repo.ui.status(_('recording removal of %s as rename to %s '
881 repo.ui.status(_('recording removal of %s as rename to %s '
882 '(%d%% similar)\n') %
882 '(%d%% similar)\n') %
883 (matcher.rel(old), matcher.rel(new),
883 (matcher.rel(old), matcher.rel(new),
884 score * 100))
884 score * 100))
885 renames[new] = old
885 renames[new] = old
886 return renames
886 return renames
887
887
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    wlock = repo.wlock()
    try:
        # removals first, then additions, then copy metadata
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
    finally:
        wlock.release()
900
900
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # if src is itself a copy, attribute this copy to the original source
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            # the copy cancels out; make dst a normally tracked file again
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added, never committed, so there is no
            # committed revision to record copy data against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                # dst is untracked or marked removed: schedule it for add
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
919
919
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
938
938
939 class filecachesubentry(object):
939 class filecachesubentry(object):
940 def __init__(self, path, stat):
940 def __init__(self, path, stat):
941 self.path = path
941 self.path = path
942 self.cachestat = None
942 self.cachestat = None
943 self._cacheable = None
943 self._cacheable = None
944
944
945 if stat:
945 if stat:
946 self.cachestat = filecachesubentry.stat(self.path)
946 self.cachestat = filecachesubentry.stat(self.path)
947
947
948 if self.cachestat:
948 if self.cachestat:
949 self._cacheable = self.cachestat.cacheable()
949 self._cacheable = self.cachestat.cacheable()
950 else:
950 else:
951 # None means we don't know yet
951 # None means we don't know yet
952 self._cacheable = None
952 self._cacheable = None
953
953
954 def refresh(self):
954 def refresh(self):
955 if self.cacheable():
955 if self.cacheable():
956 self.cachestat = filecachesubentry.stat(self.path)
956 self.cachestat = filecachesubentry.stat(self.path)
957
957
958 def cacheable(self):
958 def cacheable(self):
959 if self._cacheable is not None:
959 if self._cacheable is not None:
960 return self._cacheable
960 return self._cacheable
961
961
962 # we don't know yet, assume it is for now
962 # we don't know yet, assume it is for now
963 return True
963 return True
964
964
965 def changed(self):
965 def changed(self):
966 # no point in going further if we can't cache it
966 # no point in going further if we can't cache it
967 if not self.cacheable():
967 if not self.cacheable():
968 return True
968 return True
969
969
970 newstat = filecachesubentry.stat(self.path)
970 newstat = filecachesubentry.stat(self.path)
971
971
972 # we may not know if it's cacheable yet, check again now
972 # we may not know if it's cacheable yet, check again now
973 if newstat and self._cacheable is None:
973 if newstat and self._cacheable is None:
974 self._cacheable = newstat.cacheable()
974 self._cacheable = newstat.cacheable()
975
975
976 # check again
976 # check again
977 if not self._cacheable:
977 if not self._cacheable:
978 return True
978 return True
979
979
980 if self.cachestat != newstat:
980 if self.cachestat != newstat:
981 self.cachestat = newstat
981 self.cachestat = newstat
982 return True
982 return True
983 else:
983 else:
984 return False
984 return False
985
985
986 @staticmethod
986 @staticmethod
987 def stat(path):
987 def stat(path):
988 try:
988 try:
989 return util.cachestat(path)
989 return util.cachestat(path)
990 except OSError, e:
990 except OSError, e:
991 if e.errno != errno.ENOENT:
991 if e.errno != errno.ENOENT:
992 raise
992 raise
993
993
class filecacheentry(object):
    """Aggregates stat-based change tracking over several file paths."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, like the
        # explicit loop it replaces
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1010
1010
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative names of the watched files; resolved via self.join()
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and its name,
        # then act as a data descriptor under that name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # invariant: a value in obj.__dict__ implies an entry in
        # obj._filecache (see __set__)
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop the cached value; the _filecache entry is kept so a later
        # __get__ can still compare stat info
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1086
1086
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # _dirs maps each ancestor directory name to the number of
        # tracked paths that live (transitively) underneath it
        self._dirs = {}
        add = self.addpath
        if skip is not None and util.safehasattr(map, 'iteritems'):
            # dirstate-style mapping: honour the skip state character
            for f, s in map.iteritems():
                if s[0] != skip:
                    add(f)
        else:
            # plain iterable of paths (e.g. a manifest)
            for f in map:
                add(f)

    def addpath(self, path):
        counts = self._dirs
        for d in finddirs(path):
            if d not in counts:
                counts[d] = 1
            else:
                # every shallower ancestor is already accounted for,
                # so a single increment finishes the job
                counts[d] += 1
                return

    def delpath(self, path):
        counts = self._dirs
        for d in finddirs(path):
            if counts[d] == 1:
                del counts[d]
            else:
                # shallower ancestors keep their counts once one
                # ancestor still has other users
                counts[d] -= 1
                return

    def __iter__(self):
        # iterating a dict yields its keys lazily (py2: same as iterkeys)
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
1122
1122
# prefer the C implementation from the parsers extension module when it
# is available; it is substantially faster than the pure Python class
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
1125
1125
def finddirs(path):
    """Yield every ancestor directory of ``path``, deepest first.

    ``'a/b/c'`` yields ``'a/b'`` then ``'a'``; a path without a slash
    yields nothing.  Directory names never include a trailing slash.
    """
    remain = path
    sep = remain.rfind('/')
    while sep != -1:
        # truncating and rescanning is equivalent to rfind('/', 0, sep)
        remain = remain[:sep]
        yield remain
        sep = remain.rfind('/')
General Comments 0
You need to be logged in to leave comments. Login now