##// END OF EJS Templates
addremove: remove a mutable default argument...
Pierre-Yves David -
r26329:d9537ce6 default
parent child Browse files
Show More
@@ -1,1132 +1,1134 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import wdirrev
9 from mercurial.node import wdirrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat
13 import os, errno, re, glob, tempfile, shutil, stat
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status.

    The 'deleted', 'unknown' and 'ignored' properties are only relevant
    to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # fixed seven-slot layout; the properties below index into it
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        fmt = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
               'unknown=%r, ignored=%r, clean=%r>')
        return fmt % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a (subpath, ctx) mapping, preferring subpaths from ctx1.
    # Subpaths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # subpaths present only in ctx2 are handled separately below
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
100
100
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        if n not in repo:
            # discovery should not have included the filtered revision;
            # we have to explicitly exclude it until discovery is cleaned up.
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
121
121
def checknewlabel(repo, lbl, kind):
    '''Abort if ``lbl`` is not usable as a new label (bookmark/branch/...).'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # an all-digit name would be ambiguous with a revision number
        raise util.Abort(_("cannot use an integer as a name"))
135
135
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
140
140
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
152
152
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lowered = val.lower()
    asbool = util.parsebool(val)
    # Windows always aborts on non-portable names
    abort = lowered == 'abort' or os.name == 'nt'
    warn = asbool or lowered == 'warn'
    if asbool is None and not (warn or abort or lowered == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
165
165
class casecollisionauditor(object):
    '''Detect filenames that differ only in case from tracked files.

    Depending on ``abort``, a detected collision either raises or is
    reported as a warning on ``ui``.
    '''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case all tracked names in one encoding.lower() call by
        # joining them on NUL, then splitting again
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        lowered = encoding.lower(f)
        if lowered in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(lowered)
        self._newfiles.add(f)
189
189
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.  Returns None when nothing at or below ``maxrev``
    is filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = util.sha1()
    for rev in revs:
        s.update('%s;' % rev)
    return s.digest()
213
213
class abstractvfs(object):
    """Abstract base class; cannot be instantiated.

    Subclasses must provide ``__call__`` (open a file relative to the vfs
    root) and ``join`` (map a vfs-relative path to a real path); everything
    else here is implemented on top of those two.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind so future self.open calls go straight to __call__
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        '''return the full (binary) contents of ``path``'''
        fh = self(path, 'rb')
        try:
            return fh.read()
        finally:
            fh.close()

    def readlines(self, path, mode='rb'):
        fh = self(path, mode=mode)
        try:
            return fh.readlines()
        finally:
            fh.close()

    def write(self, path, data):
        fh = self(path, 'wb')
        try:
            return fh.write(data)
        finally:
            fh.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        fh = self(path, mode=mode, notindexed=notindexed)
        try:
            return fh.writelines(data)
        finally:
            fh.close()

    def append(self, path, data):
        fh = self(path, 'ab')
        try:
            return fh.write(data)
        finally:
            fh.close()

    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.basename(path)

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def dirname(self, path):
        """return dirname element of a path (as os.path.dirname would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.dirname(path)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''create a temp file under the vfs; return (fd, vfs-relative name)'''
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directory under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even if you specify a POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)
425
425
426 class vfs(abstractvfs):
426 class vfs(abstractvfs):
427 '''Operate files relative to a base directory
427 '''Operate files relative to a base directory
428
428
429 This class is used to hide the details of COW semantics and
429 This class is used to hide the details of COW semantics and
430 remote file access from higher level code.
430 remote file access from higher level code.
431 '''
431 '''
432 def __init__(self, base, audit=True, expandpath=False, realpath=False):
432 def __init__(self, base, audit=True, expandpath=False, realpath=False):
433 if expandpath:
433 if expandpath:
434 base = util.expandpath(base)
434 base = util.expandpath(base)
435 if realpath:
435 if realpath:
436 base = os.path.realpath(base)
436 base = os.path.realpath(base)
437 self.base = base
437 self.base = base
438 self._setmustaudit(audit)
438 self._setmustaudit(audit)
439 self.createmode = None
439 self.createmode = None
440 self._trustnlink = None
440 self._trustnlink = None
441
441
442 def _getmustaudit(self):
442 def _getmustaudit(self):
443 return self._audit
443 return self._audit
444
444
445 def _setmustaudit(self, onoff):
445 def _setmustaudit(self, onoff):
446 self._audit = onoff
446 self._audit = onoff
447 if onoff:
447 if onoff:
448 self.audit = pathutil.pathauditor(self.base)
448 self.audit = pathutil.pathauditor(self.base)
449 else:
449 else:
450 self.audit = util.always
450 self.audit = util.always
451
451
452 mustaudit = property(_getmustaudit, _setmustaudit)
452 mustaudit = property(_getmustaudit, _setmustaudit)
453
453
454 @util.propertycache
454 @util.propertycache
455 def _cansymlink(self):
455 def _cansymlink(self):
456 return util.checklink(self.base)
456 return util.checklink(self.base)
457
457
458 @util.propertycache
458 @util.propertycache
459 def _chmod(self):
459 def _chmod(self):
460 return util.checkexec(self.base)
460 return util.checkexec(self.base)
461
461
462 def _fixfilemode(self, name):
462 def _fixfilemode(self, name):
463 if self.createmode is None or not self._chmod:
463 if self.createmode is None or not self._chmod:
464 return
464 return
465 os.chmod(name, self.createmode & 0o666)
465 os.chmod(name, self.createmode & 0o666)
466
466
467 def __call__(self, path, mode="r", text=False, atomictemp=False,
467 def __call__(self, path, mode="r", text=False, atomictemp=False,
468 notindexed=False):
468 notindexed=False):
469 '''Open ``path`` file, which is relative to vfs root.
469 '''Open ``path`` file, which is relative to vfs root.
470
470
471 Newly created directories are marked as "not to be indexed by
471 Newly created directories are marked as "not to be indexed by
472 the content indexing service", if ``notindexed`` is specified
472 the content indexing service", if ``notindexed`` is specified
473 for "write" mode access.
473 for "write" mode access.
474 '''
474 '''
475 if self._audit:
475 if self._audit:
476 r = util.checkosfilename(path)
476 r = util.checkosfilename(path)
477 if r:
477 if r:
478 raise util.Abort("%s: %r" % (r, path))
478 raise util.Abort("%s: %r" % (r, path))
479 self.audit(path)
479 self.audit(path)
480 f = self.join(path)
480 f = self.join(path)
481
481
482 if not text and "b" not in mode:
482 if not text and "b" not in mode:
483 mode += "b" # for that other OS
483 mode += "b" # for that other OS
484
484
485 nlink = -1
485 nlink = -1
486 if mode not in ('r', 'rb'):
486 if mode not in ('r', 'rb'):
487 dirname, basename = util.split(f)
487 dirname, basename = util.split(f)
488 # If basename is empty, then the path is malformed because it points
488 # If basename is empty, then the path is malformed because it points
489 # to a directory. Let the posixfile() call below raise IOError.
489 # to a directory. Let the posixfile() call below raise IOError.
490 if basename:
490 if basename:
491 if atomictemp:
491 if atomictemp:
492 util.ensuredirs(dirname, self.createmode, notindexed)
492 util.ensuredirs(dirname, self.createmode, notindexed)
493 return util.atomictempfile(f, mode, self.createmode)
493 return util.atomictempfile(f, mode, self.createmode)
494 try:
494 try:
495 if 'w' in mode:
495 if 'w' in mode:
496 util.unlink(f)
496 util.unlink(f)
497 nlink = 0
497 nlink = 0
498 else:
498 else:
499 # nlinks() may behave differently for files on Windows
499 # nlinks() may behave differently for files on Windows
500 # shares if the file is open.
500 # shares if the file is open.
501 fd = util.posixfile(f)
501 fd = util.posixfile(f)
502 nlink = util.nlinks(f)
502 nlink = util.nlinks(f)
503 if nlink < 1:
503 if nlink < 1:
504 nlink = 2 # force mktempcopy (issue1922)
504 nlink = 2 # force mktempcopy (issue1922)
505 fd.close()
505 fd.close()
506 except (OSError, IOError) as e:
506 except (OSError, IOError) as e:
507 if e.errno != errno.ENOENT:
507 if e.errno != errno.ENOENT:
508 raise
508 raise
509 nlink = 0
509 nlink = 0
510 util.ensuredirs(dirname, self.createmode, notindexed)
510 util.ensuredirs(dirname, self.createmode, notindexed)
511 if nlink > 0:
511 if nlink > 0:
512 if self._trustnlink is None:
512 if self._trustnlink is None:
513 self._trustnlink = nlink > 1 or util.checknlink(f)
513 self._trustnlink = nlink > 1 or util.checknlink(f)
514 if nlink > 1 or not self._trustnlink:
514 if nlink > 1 or not self._trustnlink:
515 util.rename(util.mktempcopy(f), f)
515 util.rename(util.mktempcopy(f), f)
516 fp = util.posixfile(f, mode)
516 fp = util.posixfile(f, mode)
517 if nlink == 0:
517 if nlink == 0:
518 self._fixfilemode(f)
518 self._fixfilemode(f)
519 return fp
519 return fp
520
520
521 def symlink(self, src, dst):
521 def symlink(self, src, dst):
522 self.audit(dst)
522 self.audit(dst)
523 linkname = self.join(dst)
523 linkname = self.join(dst)
524 try:
524 try:
525 os.unlink(linkname)
525 os.unlink(linkname)
526 except OSError:
526 except OSError:
527 pass
527 pass
528
528
529 util.ensuredirs(os.path.dirname(linkname), self.createmode)
529 util.ensuredirs(os.path.dirname(linkname), self.createmode)
530
530
531 if self._cansymlink:
531 if self._cansymlink:
532 try:
532 try:
533 os.symlink(src, linkname)
533 os.symlink(src, linkname)
534 except OSError as err:
534 except OSError as err:
535 raise OSError(err.errno, _('could not symlink to %r: %s') %
535 raise OSError(err.errno, _('could not symlink to %r: %s') %
536 (src, err.strerror), linkname)
536 (src, err.strerror), linkname)
537 else:
537 else:
538 self.write(dst, src)
538 self.write(dst, src)
539
539
540 def join(self, path, *insidef):
540 def join(self, path, *insidef):
541 if path:
541 if path:
542 return os.path.join(self.base, path, *insidef)
542 return os.path.join(self.base, path, *insidef)
543 else:
543 else:
544 return self.base
544 return self.base
545
545
opener = vfs  # historical alias kept for backwards compatibility
547
547
class auditvfs(object):
    """Base class for vfs wrappers; forwards the 'mustaudit' switch to the
    wrapped vfs so toggling auditing on the wrapper affects the real one."""

    def __init__(self, vfs):
        self.vfs = vfs  # the wrapped vfs all operations delegate to

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, value):
        self.vfs.mustaudit = value

    mustaudit = property(_getmustaudit, _setmustaudit)
559
559
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs that rewrites every filename through a filter function
    before delegating to the wrapped vfs.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter  # callable mapping path -> rewritten path

    def __call__(self, path, *args, **kwargs):
        # rewrite the path, then open through the wrapped vfs
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef)))
575
575
filteropener = filtervfs  # historical alias kept for backwards compatibility
577
577
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs that permits read access only; any write mode aborts.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # only plain read modes are allowed through
        if mode in ('r', 'rb'):
            return self.vfs(path, mode, *args, **kw)
        raise util.Abort('this vfs is read only')

    def join(self, path, *insidef):
        return self.vfs.join(path, *insidef)
591
591
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def raise_on_start_dir(err):
        # an error on the starting directory is fatal; errors deeper in the
        # tree are silently skipped by os.walk
        if err.filename == path:
            raise err

    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def record_dir(statlist, dirname):
            # remember dirname's stat; return True only the first time it
            # is seen (guards against symlink cycles)
            dirstat = os.stat(dirname)
            for seen in statlist:
                if samestat(dirstat, seen):
                    return False
            statlist.append(dirstat)
            return True
    else:
        # without samestat we cannot detect cycles, so refuse to follow
        followsym = False

    if followsym and seen_dirs is None:
        seen_dirs = []
        record_dir(seen_dirs, path)

    for root, dirs, files in os.walk(path, topdown=True,
                                     onerror=raise_on_start_dir):
        dirs.sort()
        if '.hg' in dirs:
            yield root  # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot  # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = []  # don't descend further
        elif followsym:
            keep = []
            for d in dirs:
                fname = os.path.join(root, d)
                if record_dir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # recurse through the link target ourselves, since
                        # os.walk will not follow it
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        keep.append(d)
            dirs[:] = keep
639
639
def osrcpath():
    '''return default os-specific hgrc search path'''
    paths = []
    # bundled defaults ship as *.rc files under <datapath>/default.d
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        paths.extend(os.path.join(defaultpath, f)
                     for f, kind in osutil.listdir(defaultpath)
                     if f.endswith('.rc'))
    # then system-wide, then per-user configuration
    paths.extend(systemrcpath())
    paths.extend(userrcpath())
    return [os.path.normpath(f) for f in paths]
652
652
_rcpath = None  # memoized result of rcpath(); filled in lazily
654
654
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath  # computed once per process
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    result = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        entry = util.expandpath(entry)
        if os.path.isdir(entry):
            # directories contribute their *.rc files
            result.extend(os.path.join(entry, f)
                          for f, kind in osutil.listdir(entry)
                          if f.endswith('.rc'))
        else:
            result.append(entry)
    _rcpath = result
    return _rcpath
678
678
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None stands for the working directory; map it to its sentinel rev
    return wdirrev if rev is None else rev
685
685
def revsingle(repo, revspec, default='.'):
    """Resolve *revspec* to a single changectx (the last revision of its
    revset); an empty spec other than literal 0 falls back to *default*."""
    if not revspec and revspec != 0:
        return repo[default]

    resolved = revrange(repo, [revspec])
    if not resolved:
        raise util.Abort(_('empty revision set'))
    return repo[resolved.last()]
694
694
def _pairspec(revspec):
    """Return True when the spec's top-level operator is a range, meaning
    the user explicitly asked for a pair of revisions."""
    tree = revset.parse(revspec)
    tree = revset.optimize(tree, True)[1]  # fix up "x^:y" -> "(x^):y"
    return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall')
699
699
def revpair(repo, revs):
    """Resolve *revs* to a (first, second) pair of nodes.

    second is None when the specs collapse to a single revision that was
    not written as an explicit range; without specs, the working
    directory's first parent is returned."""
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    if not resolved:
        first = second = None
    elif resolved.isascending():
        first, second = resolved.min(), resolved.max()
    elif resolved.isdescending():
        first, second = resolved.max(), resolved.min()
    else:
        first, second = resolved.first(), resolved.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
726
726
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""
    # bare integers become explicit rev() specs; strings pass through
    allspecs = [revset.formatspec('rev(%d)', spec)
                if isinstance(spec, int) else spec
                for spec in revs]
    matcher = revset.matchany(repo.ui, allspecs, repo)
    return matcher(repo)
736
736
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it already has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind ("glob:", "re:", ...): leave untouched
            expanded.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            expanded.extend(globbed)
        else:
            # nothing matched: keep the original pattern verbatim
            expanded.append(kindpat)
    return expanded
755
755
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (no-op on posix, where the shell already did it)
        pats = expandpats(pats or [])

    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # a match-everything matcher means the patterns were vacuous
        pats = []
    return m, pats
780
780
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats, discarding the normalized pattern list
    matcher, _unused = matchandpats(ctx, pats, opts, globbed, default,
                                    badfn=badfn)
    return matcher
785
785
def matchall(repo):
    '''Return a matcher that efficiently accepts every file in the repo.'''
    return matchmod.always(repo.root, repo.getcwd())
789
789
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that efficiently matches exactly the given files.'''
    return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn)
793
793
def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None):
    '''Add new files and forget missing ones, recursing into subrepos that
    the matcher (or the 'subrepos' opt) selects.

    Returns 1 when any explicitly requested path was rejected, otherwise
    the worst result from the subrepo recursion (0 on full success).'''
    if opts is None:
        # never use a mutable default argument: a shared dict would leak
        # option state between calls
        opts = {}
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is selected when named exactly, or when any pattern
        # points inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only warn about files the user named explicitly
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
855
857
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    rejected = []
    # the lambda closes over 'rejected' and only fires when the matcher runs
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                msg = _('adding %s\n') % abs
            else:
                msg = _('removing %s\n') % abs
            repo.ui.status(msg)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return 0
884
886
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns (added, unknown, deleted, removed, forgotten) path lists.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        # NOTE: branch order matters — an untracked file that fails the
        # path audit deliberately falls through to the 'deleted' test
        if state == '?' and audit_path.check(fname):
            unknown.append(fname)
        elif state != 'r' and not st:
            deleted.append(fname)
        elif state == 'r' and st:
            forgotten.append(fname)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
913
915
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.

    Returns a {new: old} mapping; empty when similarity is disabled.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
928
930
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # all dirstate mutations happen under the working-copy lock
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
941
943
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc:  # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return
    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # the source is only scheduled for addition: there is no committed
        # revision to record copy metadata against
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
960
962
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.

    Returns the parsed requirement set; raises RequirementError on a
    corrupt file or on requirements this Mercurial does not support.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
979
981
def writerequires(opener, requirements):
    """Write *requirements* to the 'requires' file, one per line, sorted.

    The file is closed even when a write fails, so a partially written
    handle is never leaked."""
    reqfile = opener("requires", "w")
    try:
        for r in sorted(requirements):
            reqfile.write("%s\n" % r)
    finally:
        # original code leaked the handle if write() raised
        reqfile.close()
985
987
class filecachesubentry(object):
    """Tracks stat information for a single path backing a filecache.

    'cacheable' is tri-state: True/False once known, None while we have
    not yet been able to stat the path."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-stat only when caching can possibly work
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        # unknown counts as cacheable until proven otherwise
        return True if self._cacheable is None else self._cacheable

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return a cachestat for path, or None if it does not exist."""
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1040
1042
class filecacheentry(object):
    """Aggregates one filecachesubentry per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        # short-circuits on the first changed entry, like the original loop
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1057
1059
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behavior as
    propertycache).

    '''
    def __init__(self, *paths):
        # Filenames (relative paths) to watch; resolved per-object via join().
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        """Decorator protocol: remember the wrapped function and its name,
        then return self so this instance acts as a data descriptor."""
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        """Return the cached value, recomputing it if any tracked file changed.

        Invariant: a value stored in obj.__dict__ always has a matching
        entry in obj._filecache (see __set__).
        """
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            # Stale? Recompute and keep the existing entry object.
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        # Fast path for the next access: __get__ short-circuits on __dict__.
        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        """Explicitly assign the cached value, preserving the
        __dict__/_filecache invariant relied on by __get__."""
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            # stat=False: don't record stat data for a value set by hand.
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        """Drop the cached value so the next access recomputes it.

        Raises AttributeError (standard descriptor behavior) when no
        value is currently cached.
        """
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
General Comments 0
You need to be logged in to leave comments. Login now