##// END OF EJS Templates
vfs: add walk...
FUJIWARA Katsunori -
r24725:ee751d47 default
parent child Browse files
Show More
@@ -1,1134 +1,1150 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat
13 import os, errno, re, glob, tempfile, shutil, stat
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # store the seven categories positionally; the named properties
        # below expose them read-only
        items = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, items)

    modified = property(lambda self: self[0],
                        doc='''files that have been modified''')

    added = property(lambda self: self[1],
                     doc='''files that have been added''')

    removed = property(lambda self: self[2],
                       doc='''files that have been removed''')

    deleted = property(
        lambda self: self[3],
        doc='''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        ''')

    unknown = property(lambda self: self[4],
                       doc='''files not in the dirstate that are not ignored''')

    ignored = property(
        lambda self: self[5],
        doc='''files not in the dirstate that are ignored (by _dirignore())''')

    clean = property(lambda self: self[6],
                     doc='''files that have not been modified''')

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Map each subrepo path to the context it should be read from.
    # Entries for ctx2 are written first and then overwritten by ctx1,
    # so subpaths from ctx1 take precedence. The ctx2 entries matter
    # when the .hgsub file has been modified (in ctx2) but not yet
    # committed (in ctx1).
    subpaths = {}
    for subpath in ctx2.substate:
        subpaths[subpath] = ctx2
    for subpath in ctx1.substate:
        subpaths[subpath] = ctx1
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)
85
85
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    # collect excluded nodes that were withheld because they are secret
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
106
106
def checknewlabel(repo, lbl, kind):
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    # a label that parses as an integer would be ambiguous with a
    # revision number, so it is rejected
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise util.Abort(_("cannot use an integer as a name"))
120
120
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
137
137
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # aborting is forced on Windows regardless of configuration
    abort = lval == 'abort' or os.name == 'nt'
    warn = bval or lval == 'warn'
    valid = warn or abort or lval == 'ignore' or bval is not None
    if not valid:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
150
150
class casecollisionauditor(object):
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        self._dirstate = dirstate
        # Lower-case every tracked filename with a single encoding.lower()
        # call by NUL-joining and then re-splitting the whole set.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        collision = (lowered in self._loweredfiles
                     and f not in self._dirstate)
        if collision:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
174
174
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # only revisions at or below maxrev participate in the digest
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = util.sha1()
    for rev in revs:
        s.update('%s;' % rev)
    return s.digest()
198
198
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # only a missing file is tolerated; other I/O errors propagate
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError, inst:
            # only a missing file is tolerated; other I/O errors propagate
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind 'open' to __call__ on first use, so later calls on this
        # instance skip this wrapper entirely
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        # read the whole file in binary mode, closing it on any outcome
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    # The methods below are thin wrappers that apply the corresponding
    # os/os.path/util/osutil operation to the vfs-relative path resolved
    # via self.join().

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        # NOTE(review): dname is unused; only the filename component of the
        # temporary path is needed to build the vfs-relative return value
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                # only failures from os.remove are retried; anything else
                # (e.g. rmdir/listdir failures) is re-raised
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                # add the write bit, then retry the removal once
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            # enabling auditing installs a real path auditor rooted at base
            self.audit = pathutil.pathauditor(self.base)
        else:
            # util.always makes auditing a no-op
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # computed once per instance: does the filesystem support symlinks?
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # computed once per instance: does the filesystem honor exec bits?
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None or not self._chmod:
            return
        # masking with 0666 strips any execute bits from createmode
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
        self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink: -1 = unknown, 0 = file (to be) newly created,
        # >0 = link count of the existing file
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        # truncating write: drop the old file so hardlinked
                        # copies are not affected
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    # file does not exist yet; make sure its directory does
                    nlink = 0
                    util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break the hardlink by replacing the file with a copy
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # freshly created file: apply createmode permissions
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        # best-effort removal of any existing entry at the destination
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: store the link target as file content
            self.write(dst, src)

    def join(self, path, *insidef):
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            # a falsy path refers to the vfs root itself
            return self.base
502
518
# 'opener' is an alias for the vfs class
opener = vfs
504
520
class auditvfs(object):
    '''Base class for vfs wrappers; exposes the wrapped vfs and forwards
    the ``mustaudit`` flag to it in both directions.'''

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        # auditing state lives on the wrapped vfs
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        # propagate the new state to the wrapped vfs
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
516
532
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # every open goes through the filter before reaching the real vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        filtered = self._filter(self.vfs.reljoin(path, *insidef))
        return self.vfs.join(filtered)
532
548
# backward-compatibility alias for the pre-vfs naming scheme
filteropener = filtervfs
534
550
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed; anything else aborts
        if mode != 'r' and mode != 'rb':
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
545
561
546
562
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only a failure on the root path itself is fatal; errors on
        # deeper entries are silently skipped by os.walk
        if err.filename == path:
            raise err
    # samestat is used to detect already-visited directories (symlink
    # cycles); when the platform lacks it, symlink following is disabled
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; return True if it was
            # not seen before
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the link target ourselves, sharing
                        # seen_dirs so cycles terminate
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # only descend into unvisited, non-symlink directories
            dirs[:] = newdirs
594
610
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default.d/*.rc files come first ...
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    # ... then system-wide files, then per-user files
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
607
623
# memoized result of rcpath(); None means "not computed yet"
_rcpath = None
609
625
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # already computed on a previous call
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        # empty components are ignored (an entirely empty HGRCPATH thus
        # yields an empty search path)
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            _rcpath.extend(os.path.join(p, f)
                           for f, kind in osutil.listdir(p)
                           if f.endswith('.rc'))
        else:
            _rcpath.append(p)
    return _rcpath
633
649
def intrev(repo, rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None (working directory) maps to len(repo), one past the tip
    return len(repo) if rev is None else rev
640
656
def revsingle(repo, revspec, default='.'):
    '''Return the context for the last revision matched by revspec,
    falling back to 'default' when revspec is empty (0 is a valid spec).'''
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if not revs:
        raise util.Abort(_('empty revision set'))
    return repo[revs.last()]
649
665
def revpair(repo, revs):
    '''Resolve revs into a (first, second) pair via repo.lookup; second
    is None when the input denotes a single revision.'''
    if not revs:
        # no specs at all: working directory's first parent
        return repo.dirstate.p1(), None

    parsed = revrange(repo, revs)

    if not parsed:
        first = second = None
    elif parsed.isascending():
        first, second = parsed.min(), parsed.max()
    elif parsed.isdescending():
        first, second = parsed.max(), parsed.min()
    else:
        # unordered set: fall back to positional endpoints
        first, second = parsed.first(), parsed.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a lone spec without a range separator that resolves to a single
    # revision is reported as (rev, None)
    single = (first == second and len(revs) == 1
              and _revrangesep not in revs[0])
    if single:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
675
691
# separator used in old-style revision ranges such as "1:5"
_revrangesep = ':'
677
693
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # map an empty spec component to defval (used for the open ends
        # of "start:end"); note that 0 is a valid revision
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    # 'l' accumulates the result; 'seen' mirrors it as a set for O(1)
    # duplicate checks, but is synced lazily (see top of the loop)
    seen, l = set(), revset.baseset([])

    revsetaliases = [alias for (alias, _) in
                     repo.ui.configitems("revsetalias")]

    for spec in revs:
        if l and not seen:
            # catch 'seen' up with results deferred by a previous
            # iteration's fast path
            seen = set(l)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            # ... except for revset aliases without arguments. These
            # should be parsed as soon as possible, because they might
            # clash with a hash prefix.
            if spec in revsetaliases:
                raise error.RepoLookupError

            if isinstance(spec, int):
                # already a revision number; take it as-is
                seen.add(spec)
                l = l + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                if start in revsetaliases or end in revsetaliases:
                    raise error.RepoLookupError

                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not l:
                    # by far the most common case: revs = ["-1:0"]
                    l = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                # preserve the direction of the requested range
                l = l + revset.baseset(sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                l = l + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or l:
            # drop revisions already collected by earlier specs
            dl = [r for r in m(repo) if r not in seen]
            l = l + revset.baseset(dl)
            seen.update(dl)
        else:
            l = m(repo)

    return l
751
767
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly-kinded patterns are passed through untouched
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # glob matched nothing: keep the original pattern
            ret.append(kindpat)
    return ret
770
786
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    matcher = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                        default)

    def warnbad(f, msg):
        # report unmatchable files through the ui as warnings
        ctx.repo().ui.warn("%s: %s\n" % (matcher.rel(f), msg))

    matcher.bad = warnbad
    if matcher.always():
        # a match-everything matcher makes the pattern list meaningless
        pats = []
    return matcher, pats
787
803
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    matcher, unused = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
791
807
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    cwd = repo.getcwd()
    return matchmod.always(repo.root, cwd)
795
811
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    cwd = repo.getcwd()
    return matchmod.exact(repo.root, cwd, files)
799
815
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    '''Schedule unknown files for addition and missing tracked files for
    removal (via _markchanges), recording sufficiently similar pairs as
    renames, and recurse into matching subrepos.

    Returns 1 if any file explicitly named by the matcher was rejected or
    a subrepo reported failure, 0 otherwise.'''
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is wanted when named exactly, or when any requested
        # file path starts with its path
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                # subrepo recorded in .hgsub but not present: warn, go on
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # temporarily replace the matcher's bad-file callback so that all
    # rejected paths are collected; the original callback only fires
    # for files the user named explicitly
    rejected = []
    origbad = m.bad
    def badfn(f, msg):
        if f in m.files():
            origbad(f, msg)
        rejected.append(f)

    m.bad = badfn
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    m.bad = origbad

    # announce upcoming adds/removes, except for exact matches in
    # non-verbose mode (then the operation is implied by the command)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly named file that was rejected makes the whole
    # operation fail
    for f in rejected:
        if f in m.files():
            return 1
    return ret
862
878
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.

    Returns 1 if any of the requested files was rejected by the matcher,
    0 otherwise.'''
    m = matchfiles(repo, files)
    rejected = []
    # silently collect rejected paths; they are reported via the return
    # value at the end
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    # record the adds, removals and copies in the dirstate
    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
892
908
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) lists of paths.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # st is falsy when the file is absent from disk (see the
        # 'not st' branches); dstate is the dirstate state character
        # ('?' untracked, 'r' removed, 'a' added, per the branches below)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            # untracked and passes the path audit -> candidate for add
            unknown.append(abs)
        elif dstate != 'r' and not st:
            # tracked but gone from disk -> candidate for removal
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
921
937
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a dict mapping new name -> old name.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        # stay quiet about exact matches unless verbose is on
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
936
952
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # hold the working-directory lock across all dirstate updates so
    # they appear as one atomic change
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
949
965
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an already-recorded copy back to its origin, if any
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # unless dst is already in a merged/normal state, schedule a
        # lookup so its status is re-evaluated next time
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # the source was only added in the working copy, so there is
            # no committed revision to record the copy against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
968
984
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # an empty or non-alphanumeric-leading entry means the file
        # itself is damaged, not merely from a newer Mercurial
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
987
1003
class filecachesubentry(object):
    '''Stat-based change detector for a single path (helper for
    filecacheentry/filecache).'''

    def __init__(self, path, stat):
        # path: the file to watch; stat: whether to record its stat
        # information immediately as the baseline
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record the current stat info as the new baseline
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''Return True when the file changed since the last refresh, or
        when change detection is not possible for this path.'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat(path); returns None (implicitly) when
        # the file does not exist, re-raises any other OSError
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise
1042
1058
class filecacheentry(object):
    '''Change detector aggregating one filecachesubentry per path.'''

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, matching the
        # early-return behavior of an explicit loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1059
1075
class filecache(object):
    '''A property-like decorator that tracks files under .hg/ for updates.

    Stat info for the tracked files is recorded in the owning object's
    _filecache dict when the decorated function is first called.

    On subsequent accesses the old stat info is compared with fresh
    info; when any of the files has changed, the cached object is
    recreated and the new stat info stored in _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be
    able to tell us if a file has been replaced. If it can't, we
    fallback to recreating the object on every call (essentially the
    same behaviour as propertycache).
    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Compute the runtime path of a cached file.

        Subclasses of filecache should provide their own version of
        this function, calling the appropriate join method on 'obj'
        (an instance of the class whose member function was
        decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # used as a decorator: remember the wrapped function and the
        # attribute name the value will be cached under
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        """Return the cached value, recomputing it when a tracked file
        has changed since the last computation."""
        name = self.name
        # fast path: the value was already materialized on the instance
        try:
            val = obj.__dict__[name]
        except KeyError:
            pass
        else:
            # invariant: X in __dict__ implies X in _filecache
            assert name in obj._filecache, name
            return val

        entry = obj._filecache.get(name)
        if entry is not None:
            if entry.changed():
                # a tracked file was replaced: rebuild the cached object
                entry.obj = self.func(obj)
        else:
            tracked = [self.join(obj, path) for path in self.paths]
            # We stat -before- creating the object so our cache doesn't
            # lie if a writer modified between the time we read and stat
            entry = filecacheentry(tracked, True)
            entry.obj = self.func(obj)

        obj._filecache[name] = entry
        obj.__dict__[name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        name = self.name
        entry = obj._filecache.get(name)
        if entry is None:
            # we add an entry for the missing value because X in
            # __dict__ implies X in _filecache
            tracked = [self.join(obj, path) for path in self.paths]
            entry = filecacheentry(tracked, False)
            obj._filecache[name] = entry

        entry.obj = value               # update cached copy
        obj.__dict__[name] = value      # update copy returned by obj.x

    def __delete__(self, obj):
        # mirror normal attribute semantics: deleting a value that was
        # never set raises AttributeError
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
General Comments 0
You need to be logged in to leave comments. Login now