##// END OF EJS Templates
revrange: drop old-style parser in favor of revset (API)...
Yuya Nishihara -
r25904:fbaa2de1 default
parent child Browse files
Show More
@@ -1,1168 +1,1124
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev, wdirrev
9 from mercurial.node import wdirrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat
13 import os, errno, re, glob, tempfile, shutil, stat
14
14
# Select the platform-specific scm helper module at import time.
if os.name == 'nt':
    import scmwindows as scmplatform
else:
    import scmposix as scmplatform

# Re-export the platform-specific hgrc path lookups at module level.
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self):
        # Fixed: the old signature took *args/**kwargs and silently ignored
        # them; repr() never passes arguments, so accepting any only masked
        # caller mistakes.
        return (('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                 'unknown=%r, ignored=%r, clean=%r>') % self)
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths that exist only in ctx2 get special handling below
    missing = set()

    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # items() instead of iteritems(): sorted() materializes the pairs
    # anyway, and items() behaves identically while staying compatible
    # with Python 3.
    for subpath, ctx in sorted(subpaths.items()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
100
100
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretnodes = []
    for node in excluded or []:
        if node not in repo:
            # discovery should not have included the filtered revision,
            # we have to explicitly exclude it until discovery is cleanup.
            continue
        ctx = repo[node]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretnodes.append(node)

    if not secretnodes:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretnodes))
121
121
def checknewlabel(repo, lbl, kind):
    '''Abort if ``lbl`` is not usable as a new label name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise util.Abort(_("%r cannot be used in a name") % forbidden)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a purely numeric name would be ambiguous with revision numbers
        raise util.Abort(_("cannot use an integer as a name"))
135
135
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if any(c in f for c in ('\r', '\n')):
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
140
140
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    problem = util.checkwinfilename(f)
    if not problem:
        return
    msg = "%s: %r" % (problem, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
152
152
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    raw = ui.config('ui', 'portablefilenames', 'warn')
    lowered = raw.lower()
    asbool = util.parsebool(raw)
    # Windows enforces portability unconditionally; elsewhere only when
    # explicitly requested.
    abort = os.name == 'nt' or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % raw)
    return abort, warn
165
165
class casecollisionauditor(object):
    '''Warn or abort when a new filename would collide case-insensitively
    with a file already tracked in the dirstate.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lower-case the whole dirstate in a single encoding.lower() call;
        # joining on NUL is safe because NUL cannot appear in filenames.
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            warning = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(warning)
            self._ui.warn(_("warning: %s\n") % warning)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
189
189
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    filtered = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not filtered:
        return None
    s = util.sha1()
    for rev in filtered:
        s.update('%s;' % rev)
    return s.digest()
213
213
class abstractvfs(object):
    """Abstract base class; cannot be instantiated"""

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            # only a missing file is tolerated; other I/O errors propagate
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind 'open' to __call__ on first use so later calls bypass
        # this wrapper method entirely
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        '''Return the full content of ``path``, opened in binary mode.'''
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        '''Return the list of lines of ``path``.'''
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        '''Write ``data`` to ``path``, replacing any existing content.'''
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        '''Write the sequence of lines ``data`` to ``path``.'''
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        '''Append ``data`` to the existing content of ``path``.'''
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.basename(path)

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def dirname(self, path):
        """return dirname element of a path (as os.path.dirname would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.dirname(path)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        # stat an already-open file object
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''Create a temp file under the vfs and return ``(fd, name)``.

        When ``dir`` is given, the returned name is relative to it;
        otherwise only the bare filename is returned.
        '''
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                # only retry failures coming from os.remove itself
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        # l: symlink flag, x: executable flag
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)
425
425
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # expandpath resolves '~'/environment variables; realpath
        # additionally resolves symlinks in the base path
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            # auditing disabled: accept every path unconditionally
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # whether the base filesystem supports symlinks (cached)
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # whether the base filesystem honors exec bits (cached)
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply createmode to a newly created file, minus exec bits
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink tracks the hardlink count of the target; -1 means unknown,
        # 0 means the file is (or will be) freshly created
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        # truncating write: drop the old file so hardlinked
                        # copies are not clobbered
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break the hardlink (copy-on-write) before modifying
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            # newly created file: apply the configured creation mode
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit(dst)
        linkname = self.join(dst)
        try:
            # remove any pre-existing file or link at the destination
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            # no symlink support: store the link target as file content
            self.write(dst, src)

    def join(self, path, *insidef):
        # with no path, return the vfs base directory itself
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base
545
545
546 opener = vfs
546 opener = vfs
547
547
class auditvfs(object):
    """Wrap another vfs and proxy its ``mustaudit`` flag.

    Subclasses delegate actual file access to ``self.vfs``; this base
    class only forwards the auditing toggle to the wrapped object.
    """

    def __init__(self, vfs):
        # the wrapped vfs performing the real path auditing
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, onoff):
        self.vfs.mustaudit = onoff

    mustaudit = property(_getmustaudit, _setmustaudit)
559
559
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        # callable mapping a relative path to the path actually used
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # rewrite the path before delegating to the wrapped vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
575
575
576 filteropener = filtervfs
576 filteropener = filtervfs
577
577
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
588
588
589
589
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # errors on the root itself are fatal; others are ignored
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat; True only when it was not seen before
            # (guards against symlink cycles)
            dirstat = os.stat(dirname)
            for seenstat in dirlst:
                if samestat(dirstat, seenstat):
                    return False
            dirlst.append(dirstat)
            return True
    else:
        # without samestat we cannot detect cycles safely
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if not recurse:
                dirs[:] = [] # don't descend further
            else:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the link target separately, sharing seen_dirs
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
637
637
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default configuration snippets come first (lowest priority)
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        rcfiles = [f for f, kind in osutil.listdir(defaultpath)
                   if f.endswith('.rc')]
        path.extend(os.path.join(defaultpath, f) for f in rcfiles)
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
650
650
651 _rcpath = None
651 _rcpath = None
652
652
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    # the computed path is cached in the module-level _rcpath
    if _rcpath is not None:
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in os.environ['HGRCPATH'].split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if not os.path.isdir(p):
            _rcpath.append(p)
            continue
        # a directory entry contributes every *.rc file inside it
        for f, kind in osutil.listdir(p):
            if f.endswith('.rc'):
                _rcpath.append(os.path.join(p, f))
    return _rcpath
676
676
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # the working directory (rev None) maps to the magic wdirrev integer
    return wdirrev if rev is None else rev
683
683
def revsingle(repo, revspec, default='.'):
    """Resolve revspec to a single changectx, using default when it is empty.

    Raises Abort when the spec resolves to an empty set.
    """
    # 0 is a valid revision, so only a truly empty spec falls back
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise util.Abort(_('empty revision set'))
    return repo[matched.last()]
692
692
def revpair(repo, revs):
    """Resolve revs to a (node, node-or-None) pair.

    Without revs, returns the first working directory parent and None.
    A single spec without a range separator yields (node, None).
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    if not l:
        first = second = None
    elif l.isascending():
        first, second = l.min(), l.max()
    elif l.isdescending():
        first, second = l.max(), l.min()
    else:
        first, second = l.first(), l.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # one unseparated spec resolving to a single rev means "no second rev"
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
718
718
719 _revrangesep = ':'
719 _revrangesep = ':'
720
720
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    subsets = []
    for spec in revs:
        # integer specs come straight from the API; wrap them in rev()
        # so the revset parser can handle them uniformly
        if isinstance(spec, int):
            spec = revset.formatspec('rev(%d)', spec)
        m = revset.match(repo.ui, spec, repo)
        subsets.append(m(repo))
    return revset._combinesets(subsets)
775
731
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicitly kinded patterns (e.g. "re:...") are never globbed
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # keep the original pattern when the glob matched nothing
            ret.append(kindpat)
    return ret
794
750
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        # expand bare globs (no-op on posix, see expandpats)
        pats = expandpats(pats or [])

    def bad(f, msg):
        # default bad-file callback: warn on the repo ui
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    # an always-matcher needs no pattern list
    if m.always():
        pats = []
    return m, pats
817
773
def match(ctx, pats=[], opts={}, globbed=False, default='relpath', badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
821
777
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    # delegate to the match module's dedicated always-matcher
    return matchmod.always(repo.root, repo.getcwd())
825
781
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    # exact matchers never expand patterns, only literal file names
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
829
785
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    """Schedule addition of unknown files and removal of missing ones.

    Also recurses into matching subrepos and records renames detected by
    similarity.  Returns 1 when any explicitly named file was rejected,
    0 otherwise.
    """
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo matches when named exactly, or when any pattern file
        # lies inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only complain about files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
891
847
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # badfn collects rejections; note m is bound before rejected is used
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            status = (_('adding %s\n') % abs if abs in unknownset
                      else _('removing %s\n') % abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
920
876
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # dstate is the dirstate character: '?' unknown, 'r' removed,
        # 'a' added; st is the on-disk stat result (falsy if missing)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
949
905
950 def _findrenames(repo, matcher, added, removed, similarity):
906 def _findrenames(repo, matcher, added, removed, similarity):
951 '''Find renames from removed files to added ones.'''
907 '''Find renames from removed files to added ones.'''
952 renames = {}
908 renames = {}
953 if similarity > 0:
909 if similarity > 0:
954 for old, new, score in similar.findrenames(repo, added, removed,
910 for old, new, score in similar.findrenames(repo, added, removed,
955 similarity):
911 similarity):
956 if (repo.ui.verbose or not matcher.exact(old)
912 if (repo.ui.verbose or not matcher.exact(old)
957 or not matcher.exact(new)):
913 or not matcher.exact(new)):
958 repo.ui.status(_('recording removal of %s as rename to %s '
914 repo.ui.status(_('recording removal of %s as rename to %s '
959 '(%d%% similar)\n') %
915 '(%d%% similar)\n') %
960 (matcher.rel(old), matcher.rel(new),
916 (matcher.rel(old), matcher.rel(new),
961 score * 100))
917 score * 100))
962 renames[new] = old
918 renames[new] = old
963 return renames
919 return renames
964
920
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        # renames maps new name -> old name
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
977
933
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # the source was never committed: no copy data can be recorded
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
996
952
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a non-alphanumeric first character indicates file corruption
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        missings.sort()
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1015
971
def writerequires(opener, requirements):
    """Write the requirements, sorted one per line, to .hg/requires.

    The file handle is always closed, even if a write fails (the
    original version leaked the handle on error).
    """
    reqfile = opener("requires", "w")
    try:
        for r in sorted(requirements):
            reqfile.write("%s\n" % r)
    finally:
        reqfile.close()
1021
977
class filecachesubentry(object):
    """Track one file path via stat data to detect changes.

    ``_cacheable`` is None while unknown, then a bool once determined
    from the first successful stat.
    """

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None  # None means we don't know yet

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()

    def refresh(self):
        # re-stat only when caching can possibly work
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # we don't know yet, assume it is for now
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        # returns None when the file does not exist
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1076
1032
class filecacheentry(object):
    """Aggregate change tracker over several paths.

    Wraps one filecachesubentry per path; reports changed when any
    of them changed.
    """

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # any() short-circuits on the first changed entry, matching the
        # original early-return behavior
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1093
1049
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        # paths are relative names of the files to watch; they are
        # resolved to runtime paths via join() at access time
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator protocol: remember the wrapped function and cache the
        # computed value under the function's own name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        # fast path: a value in obj.__dict__ means it was computed (or set)
        # since the last invalidation; invariant: X in __dict__ implies
        # X in _filecache (see __set__)
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # note: entry.changed() also refreshes the recorded stat info
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        # explicit assignment replaces the cached value without re-running
        # the wrapped function
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        # drop only the materialized value; the _filecache entry (and its
        # stat info) is kept, so the next __get__ re-checks the files
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
General Comments 0
You need to be logged in to leave comments. Login now