##// END OF EJS Templates
revrange: build spanset from x:y range...
Yuya Nishihara -
r25386:a5a95642 default
parent child Browse files
Show More
@@ -1,1145 +1,1147 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat, inspect
13 import os, errno, re, glob, tempfile, shutil, stat, inspect
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    # no per-instance __dict__; all state lives in the tuple itself
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # fixed slot order: indices 0-6 are read back by the properties below
        return tuple.__new__(cls, (modified, added, removed, deleted,
                                   unknown, ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping preferring subpaths from ctx1.
    # Subpaths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but not yet committed (in ctx1): ctx1's entries are
    # written last and therefore win on overlap.
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)
85
85
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in excluded or []:
        if n not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
106
106
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not usable as a new label (bookmark/branch/tag).

    Rejects reserved names, control/separator characters and plain
    integers (which would be ambiguous with revision numbers).
    """
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        raise util.Abort(_("cannot use an integer as a name"))
120
120
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    # newline characters would corrupt manifest/dirstate storage formats
    for forbidden in ('\n', '\r'):
        if forbidden in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        # portability checking disabled by configuration
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
137
137
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lowered = val.lower()
    asbool = util.parsebool(val)
    # non-portable names are always fatal on Windows itself
    abort = os.name == 'nt' or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
150
150
class casecollisionauditor(object):
    """Detect new filenames that case-fold onto already-tracked ones.

    Calling the instance with a filename warns (or aborts, depending on
    the ``abort`` flag) on a possible case-folding collision.
    """

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked path with a single encoding.lower()
        # call over a NUL-joined blob, then split back into a set
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # _newfiles suppresses duplicate complaints when this object is
        # called twice with the same filename.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        folded = encoding.lower(f)
        if folded in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(folded)
        self._newfiles.add(f)
174
174
def develwarn(tui, msg):
    """issue a developer warning message"""
    msg = 'devel-warn: ' + msg
    if tui.tracebackflag:
        util.debugstacktrace(msg, 2)
        return
    # identify the caller two frames up (skipping develwarn itself)
    caller = inspect.getouterframes(inspect.currentframe(), 2)[2]
    tui.write_err('%s at: %s:%s (%s)\n' % ((msg,) + caller[1:4]))
184
184
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view (up to and
    including maxrev) and returns that SHA-1 digest, or None if nothing
    is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = util.sha1()
    for rev in revs:
        s.update('%s;' % rev)
    return s.digest()
208
208
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            # only a missing file (ENOENT) is tolerated; other I/O
            # errors still propagate to the caller
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError, inst:
            # same ENOENT-only tolerance as tryread()
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind 'open' on the instance to __call__ so later calls on
        # this object bypass this shim entirely
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        # read the whole file as binary, always closing the handle
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            # report the temp file's path the same way the caller gave
            # ``dir``: relative to the vfs root, not absolute
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                # only retry failures coming from os.remove itself
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    # file was already writable; failure had another cause
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)
408
408
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # expandpath: expand '~' / environment variables in ``base``
        if expandpath:
            base = util.expandpath(base)
        # realpath: resolve symlinks so ``base`` is canonical
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept every path unconditionally
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # computed once per instance: does the filesystem support symlinks?
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # computed once per instance: does the filesystem honor exec bits?
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
        self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink: -1 unknown, 0 newly created, >1 hardlinked (needs COW)
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break hardlinks before writing: replace the file
                        # with a private copy so siblings are unaffected
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # freshly created file: apply createmode permissions
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: store the link target as file content
            self.write(dst, src)

    def join(self, path, *insidef):
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base
528
528
opener = vfs  # alternate name for the vfs class
530
530
class auditvfs(object):
    """Base class holding a wrapped vfs and delegating ``mustaudit`` to it."""

    def __init__(self, vfs):
        # keep a reference to the vfs being wrapped
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, value):
        self.vfs.mustaudit = value

    mustaudit = property(_getmustaudit, _setmustaudit)
542
542
class filtervfs(abstractvfs, auditvfs):
    """Wrapper vfs that rewrites every filename through a filter function."""

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # open via the wrapped vfs, filtering the name first
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        # join the relative components first, then filter the whole path
        return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))

filteropener = filtervfs
560
560
class readonlyvfs(abstractvfs, auditvfs):
    """Wrapper vfs that refuses any write access to the wrapped vfs."""

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    """Yield every hg repository under path, always recursively.

    The recurse flag only controls whether we descend into a repository's
    working directory once found; .hg directories are never entered.
    When followsym is set (and the platform supports os.path.samestat),
    symlinked directories are followed while guarding against cycles.
    """
    def onwalkerror(err):
        # errors on the walk root are fatal; everything below is skipped
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # remember dirname's stat; return True only if it is new
            dirstat = os.stat(dirname)
            for seenstat in dirlst:
                if samestat(dirstat, seenstat):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect symlink cycles, so don't follow
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True, onerror=onwalkerror):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # descend into the working dir, but never into .hg itself
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse through the link target ourselves since
                        # os.walk will not follow it
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
620
620
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default configuration snippets live in <datapath>/default.d
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    # normalize everything so duplicates/comparisons behave predictably
    return [os.path.normpath(f) for f in path]
633
633
_rcpath = None  # memoized result of rcpath()

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        entry = util.expandpath(entry)
        if os.path.isdir(entry):
            # a directory contributes each of its *.rc files
            _rcpath.extend(os.path.join(entry, f)
                           for f, kind in osutil.listdir(entry)
                           if f.endswith('.rc'))
        else:
            _rcpath.append(entry)
    return _rcpath
659
659
def intrev(repo, rev):
    """Return an integer for rev usable in comparison or arithmetic.

    The working directory is represented by None; it maps to len(repo),
    which sorts after every committed revision.
    """
    return len(repo) if rev is None else rev
666
666
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx (the last of its revset).

    An empty spec (but not the integer 0) falls back to the default rev.
    """
    if not revspec and revspec != 0:
        return repo[default]

    revs = revrange(repo, [revspec])
    if not revs:
        raise util.Abort(_('empty revision set'))
    return repo[revs.last()]
675
675
def revpair(repo, revs):
    """Resolve revs into a pair of nodes; the second may be None.

    With no specs at all, the pair is (first working-dir parent, None).
    A single spec that contains no range separator also yields None as
    the second element, so callers can distinguish "one rev" from
    "a range that happens to collapse to one rev".
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a lone spec without ':' denotes one revision, not a range
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
701
701
_revrangesep = ':'

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications.

    Old-style specs (integers, hashes, x:y ranges) are resolved directly;
    anything else falls through to the revset parser. For an x:y range we
    build a lazy revset.spanset rather than materializing every revision
    of the range into a baseset, which keeps wide ranges O(1) in memory.
    """

    def revfix(repo, val, defval):
        # an empty endpoint (but not the integer 0) takes the default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    subsets = []

    revsetaliases = [alias for (alias, _) in
                     repo.ui.configitems("revsetalias")]

    for spec in revs:
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            # ... except for revset aliases without arguments. These
            # should be parsed as soon as possible, because they might
            # clash with a hash prefix.
            if spec in revsetaliases:
                raise error.RepoLookupError

            if isinstance(spec, int):
                subsets.append(revset.baseset([spec]))
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                if start in revsetaliases or end in revsetaliases:
                    raise error.RepoLookupError

                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                # spanset end bounds are exclusive: ascending ranges stop
                # past end, descending ones stop just before it
                if start < end:
                    l = revset.spanset(repo, start, end + 1)
                else:
                    l = revset.spanset(repo, start, end - 1)
                subsets.append(l)
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                subsets.append(revset.baseset([rev]))
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        subsets.append(m(repo))

    return revset._combinesets(subsets)
756
758
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # bare pattern: let the glob module expand it
            try:
                matches = glob.glob(pat)
            except re.error:
                matches = [pat]
            if matches:
                expanded.extend(matches)
                continue
        # kinded patterns (and globs matching nothing) pass through as-is
        expanded.append(kindpat)
    return expanded
775
777
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    """Return a (matcher, patterns) pair.

    The matcher warns about bad matches through the repo ui. The returned
    patterns are the ones actually used (empty when the matcher matches
    everything anyway).
    """
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        # bare globs must be expanded here on windows (sh does it on posix)
        pats = expandpats(pats or [])

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'))

    def badfn(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    m.bad = badfn
    if m.always():
        pats = []
    return m, pats
792
794
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return m
796
798
def matchall(repo):
    """Return a matcher that efficiently matches every file in repo."""
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
800
802
def matchfiles(repo, files):
    """Return a matcher that efficiently matches exactly these files."""
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
804
806
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    """Add new files and forget missing files matched by matcher.

    Recurses into matching subrepos, detects renames above the given
    similarity threshold, and returns 1 if anything was rejected or if a
    subrepo reported a problem, 0 otherwise.
    """
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0

    def join(f):
        return os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is addressed when named exactly or via a path inside it
        if matcher.exact(subpath):
            return True
        return any(f.startswith(subpath) for f in matcher.files())

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # collect bad files, but only report the ones the user named explicitly
    rejected = []
    origbad = m.bad
    def badfn(f, msg):
        if f in m.files():
            origbad(f, msg)
        rejected.append(f)

    m.bad = badfn
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)
    m.bad = origbad

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        # only echo implicit matches unless the user asked for verbosity
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
867
869
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    m = matchfiles(repo, files)
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset | set(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a rejected file the caller explicitly asked about is an error
    for f in rejected:
        if f in m.files():
            return 1
    return 0
897
899
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    results = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                            full=False)
    for abs, st in results.iteritems():
        state = dirstate[abs]
        # classify by (dirstate entry, whether the file exists on disk)
        if state == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif state != 'r' and not st:
            deleted.append(abs)
        elif state == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(abs)
        elif state == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
926
928
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
941
943
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for newname, oldname in renames.iteritems():
            wctx.copy(oldname, newname)
    finally:
        wlock.release()
954
956
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst.

    For different reasons it might not end with dst being marked as copied
    from src (e.g. when the source has never been committed).
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:
        # copying back a copy: just make sure dst is tracked normally
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # the source was only added, never committed: no copy data to store
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
973
975
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a blank or non-alphanumeric-leading entry means a corrupt file
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
992
994
def writerequires(opener, requirements):
    """Write the requirement names, sorted and newline-terminated,
    to .hg/requires via the given opener."""
    reqfile = opener("requires", "w")
    reqfile.writelines("%s\n" % r for r in sorted(requirements))
    reqfile.close()
998
1000
class filecachesubentry(object):
    """Stat-cache record for one file path.

    Tracks whether the file's stat data can be trusted for caching and
    whether it has changed since the last refresh.
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        """Re-stat the path, unless it is known to be uncacheable."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # unknown cacheability is optimistically treated as cacheable
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        """True if the path changed (or cannot be cached at all)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1053
1055
class filecacheentry(object):
    """Aggregate stat cache over several paths, one subentry per path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(e.changed() for e in self._entries)

    def refresh(self):
        for e in self._entries:
            e.refresh()
1070
1072
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # relative paths (under .hg/) whose stat info invalidates the cache
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and the
        # attribute name it will be cached under
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # Accessing the descriptor on the class rather than an instance
        # must return the descriptor itself (same convention as
        # property); without this guard the code below would crash on
        # None.__dict__.
        if obj is None:
            return self
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            # fast path: value already cached on the instance
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                # tracked file changed on disk: recompute the value
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            # stat=False: a value set by hand has no on-disk state yet
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop the instance-level cache; raise AttributeError (not
        # KeyError) to honor the attribute-deletion protocol
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
General Comments 0
You need to be logged in to leave comments. Login now