# Provenance: scmutil.py from Mercurial changeset r23371:1df6519e (default
# branch) -- "vfs: add 'writelines'" by FUJIWARA Katsunori.
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases, parsers
10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile
13 import os, errno, re, glob, tempfile
14
14
15 if os.name == 'nt':
15 if os.name == 'nt':
16 import scmwindows as scmplatform
16 import scmwindows as scmplatform
17 else:
17 else:
18 import scmposix as scmplatform
18 import scmposix as scmplatform
19
19
20 systemrcpath = scmplatform.systemrcpath
20 systemrcpath = scmplatform.systemrcpath
21 userrcpath = scmplatform.userrcpath
21 userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # Pack the seven per-status file lists into the underlying tuple;
        # the read-only properties below give each slot a readable name.
        fields = (modified, added, removed, deleted, unknown, ignored, clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
    # Iterate over sorted keys instead of sorted(subpaths.iteritems()):
    # dict keys are unique, so the ordering is identical, and plain key
    # iteration is not Python-2-only like iteritems().
    for subpath in sorted(subpaths):
        yield subpath, subpaths[subpath].sub(subpath)
85
85
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    for n in (excluded or []):
        # discovery should not have included the filtered revision,
        # we have to explicitly exclude it until discovery is cleanup.
        if n not in repo:
            continue
        ctx = repo[n]
        if ctx.phase() >= phases.secret and not ctx.extinct():
            secretlist.append(n)

    if secretlist:
        msg = _("no changes found (ignored %d secret changesets)\n")
        ui.status(msg % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
106
106
def checknewlabel(repo, lbl, kind):
    '''Abort if ``lbl`` is not acceptable as a new label name.'''
    # Do not use the "kind" parameter in ui output.
    # It makes strings difficult to translate.
    if lbl in ('tip', '.', 'null'):
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    for c in (':', '\0', '\n', '\r'):
        if c in lbl:
            raise util.Abort(_("%r cannot be used in a name") % c)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # int() succeeded, so the label would shadow a revision number
        raise util.Abort(_("cannot use an integer as a name"))
120
120
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    if '\n' in f or '\r' in f:
        raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
125
125
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %r" % (msg, f)
    if abort:
        raise util.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
137
137
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of truthy/falsy values.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # Windows always aborts; elsewhere only an explicit 'abort' does.
    abort = os.name == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        msg = _("ui.portablefilenames value is invalid ('%s')") % val
        raise error.ConfigError(msg)
    return abort, warn
150
150
class casecollisionauditor(object):
    '''Warn or abort when a new filename case-folds onto a tracked one.'''

    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        joined = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(joined).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
174
174
class abstractvfs(object):
    """Abstract base class; cannot be instantiated

    Concrete subclasses must be callable (``self(path, mode, ...)``
    returns an open file object) and provide ``join``; everything here
    is implemented in terms of those two operations.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError, inst:
            if inst.errno != errno.ENOENT:
                raise
            return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # Replace this wrapper with the bound __call__ on the instance so
        # subsequent open() calls dispatch directly.
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        '''return the binary contents of ``path``'''
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        '''return the contents of ``path`` as a list of lines'''
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        '''write ``data`` to ``path`` in binary mode, truncating it'''
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        '''write the sequence of lines ``data`` to ``path``

        ``mode`` and ``notindexed`` are forwarded to the underlying open
        (see ``open``); no line separators are added.
        '''
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        '''append ``data`` to ``path`` in binary mode'''
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    # The remaining methods delegate to os/util/osutil on the joined
    # (vfs-root-relative) path.

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''create a temporary file inside the vfs; return (fd, name) with
        the name made relative to the vfs root again'''
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)
313
320
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # ``expandpath`` resolves ~user and environment variables;
        # ``realpath`` resolves symlinks in the base directory.
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        # createmode, when set by callers, is applied to newly created files
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        # Switch between a real path auditor and a no-op check.
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # Apply createmode (sans write bits for group/other beyond 0666)
        # to a freshly created file, when the filesystem supports chmod.
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink tracking: -1 unknown, 0 newly created (needs mode fixup),
        # >1 possibly hardlinked (needs copy-on-write break below)
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError), e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    if nlink > 1 or not self._trustnlink:
                        # break up hardlinks before modifying in place
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        '''create a symlink at ``dst`` pointing to ``src``; falls back to
        writing ``src`` as the file contents when symlinks are unsupported'''
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path):
        '''return ``path`` made absolute relative to the vfs base'''
        if path:
            return os.path.join(self.base, path)
        else:
            return self.base
433
440
# alias: 'opener' refers to the same class as vfs
opener = vfs
435
442
class auditvfs(object):
    '''Base wrapper holding a vfs and forwarding its ``mustaudit`` flag.'''

    def __init__(self, vfs):
        self.vfs = vfs

    def _getmustaudit(self):
        return self.vfs.mustaudit

    def _setmustaudit(self, enabled):
        self.vfs.mustaudit = enabled

    mustaudit = property(_getmustaudit, _setmustaudit)
447
454
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # Rewrite the path with the filter before delegating.
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path):
        if not path:
            return self.vfs.join(path)
        return self.vfs.join(self._filter(path))
463
470
# alias: 'filteropener' refers to the same class as filtervfs
filteropener = filtervfs
465
472
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # Only plain read modes are allowed through.
        if mode in ('r', 'rb'):
            return self.vfs(path, mode, *args, **kw)
        raise util.Abort('this vfs is read only')
476
483
477
484
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate errors on the root path itself
        if err.filename == path:
            raise err
    # samestat may be missing on some platforms; without it we cannot
    # detect symlink cycles, so symlink following is disabled below.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # return True (and record the stat) if dirname was not seen yet
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk the symlink target separately, sharing
                        # seen_dirs to avoid revisiting directories
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            # prune already-seen and symlinked entries from the walk
            dirs[:] = newdirs
525
532
def osrcpath():
    '''return default os-specific hgrc search path'''
    path = []
    # bundled default.d/*.rc files come first ...
    defaultpath = os.path.join(util.datapath, 'default.d')
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    # ... then system-wide and per-user configuration
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
538
545
# cache for rcpath(): None until the search path is first computed
_rcpath = None
540
547
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        return _rcpath
    env = os.environ.get('HGRCPATH')
    if env is None:
        _rcpath = osrcpath()
        return _rcpath
    _rcpath = []
    for p in env.split(os.pathsep):
        if not p:
            continue
        p = util.expandpath(p)
        if os.path.isdir(p):
            # a directory entry contributes all of its *.rc files
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    _rcpath.append(os.path.join(p, f))
        else:
            _rcpath.append(p)
    return _rcpath
564
571
def revsingle(repo, revspec, default='.'):
    '''Resolve a single revision spec to a changectx.

    Falls back to 'default' when the spec is empty; aborts when the spec
    resolves to an empty set.'''
    # 0 is a valid revision number, so only fall back when the spec is
    # genuinely empty (None, '', ...)
    if not revspec and revspec != 0:
        return repo[default]

    resolved = revrange(repo, [revspec])
    if not resolved:
        raise util.Abort(_('empty revision set'))
    return repo[resolved.last()]
573
580
def revpair(repo, revs):
    '''Resolve a list of revision specs to a (node, node-or-None) pair.

    With no specs, returns the working directory's first parent and None.
    A single non-range spec collapses to (node, None).'''
    if not revs:
        return repo.dirstate.p1(), None

    resolved = revrange(repo, revs)

    # pick the two ends of the set, exploiting ordering when known
    if not resolved:
        rev1 = rev2 = None
    elif resolved.isascending():
        rev1, rev2 = resolved.min(), resolved.max()
    elif resolved.isdescending():
        rev1, rev2 = resolved.max(), resolved.min()
    else:
        rev1, rev2 = resolved.first(), resolved.last()

    if rev1 is None:
        raise util.Abort(_('empty revision range'))

    # a lone spec without a range separator denotes a single revision
    if rev1 == rev2 and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(rev1), None

    return repo.lookup(rev1), repo.lookup(rev2)
599
606
_revrangesep = ':'

def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # an empty (but non-zero) range component falls back to its default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    seen, result = set(), revset.baseset([])
    for spec in revs:
        if result and not seen:
            # 'seen' syncing was deferred on the previous iteration
            seen = set(result)
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            if isinstance(spec, int):
                seen.add(spec)
                result = result + revset.baseset([spec])
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                rangeiter = repo.changelog.revs(start, end)
                if not seen and not result:
                    # by far the most common case: revs = ["-1:0"]
                    result = revset.baseset(rangeiter)
                    # defer syncing seen until next iteration
                    continue
                newrevs = set(rangeiter)
                if seen:
                    newrevs.difference_update(seen)
                    seen.update(newrevs)
                else:
                    seen = newrevs
                result = result + revset.baseset(
                    sorted(newrevs, reverse=start > end))
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                if rev in seen:
                    continue
                seen.add(rev)
                result = result + revset.baseset([rev])
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        if seen or result:
            dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
            result = result + revset.baseset(dl)
            seen.update(dl)
        else:
            result = m(repo, revset.spanset(repo))

    return result
662
669
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    expanded = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # bare pattern: let the glob module expand it
            try:
                matches = glob.glob(pat)
            except re.error:
                matches = [pat]
            if matches:
                expanded.extend(matches)
                continue
        # explicit kind, or no glob matches: keep the pattern verbatim
        expanded.append(kindpat)
    return expanded
681
688
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches.'''
    # a single empty pattern means "no patterns at all"
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    matcher = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                        default)

    def badfn(f, msg):
        # route unmatched-file complaints through the ui
        ctx._repo.ui.warn("%s: %s\n" % (matcher.rel(f), msg))

    matcher.bad = badfn
    return matcher, pats
696
703
def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
    '''Return a matcher that will warn about bad matches.'''
    # same as matchandpats(), but discard the expanded pattern list
    matcher, _pats = matchandpats(ctx, pats, opts, globbed, default)
    return matcher
700
707
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.always(root, cwd)
704
711
def matchfiles(repo, files):
    '''Return a matcher that will efficiently match exactly these files.'''
    root, cwd = repo.root, repo.getcwd()
    return matchmod.exact(root, cwd, files)
708
715
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    '''Schedule new files for addition and missing files for removal,
    recording similar removed/added pairs as renames.

    Returns 1 if any explicitly requested file was rejected, 0 otherwise.'''
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    matcher = match(repo[None], pats, opts)
    rejected = []
    matcher.bad = lambda f, msg: rejected.append(f)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    matcher)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if not (repo.ui.verbose or not matcher.exact(abs)):
            continue
        rel = matcher.rel(abs)
        # prefer the relative name when the user gave patterns
        label = (pats and rel) or abs
        if abs in unknownset:
            repo.ui.status(_('adding %s\n') % label)
        else:
            repo.ui.status(_('removing %s\n') % label)

    renames = _findrenames(repo, matcher, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    explicit = matcher.files()
    for f in rejected:
        if f in explicit:
            return 1
    return 0
743
750
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    matcher = matchfiles(repo, files)
    rejected = []
    matcher.bad = lambda f, msg: rejected.append(f)

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                    matcher)

    if repo.ui.verbose:
        # report every file we are about to touch
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                repo.ui.status(_('adding %s\n') % abs)
            else:
                repo.ui.status(_('removing %s\n') % abs)

    renames = _findrenames(repo, matcher, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    explicit = matcher.files()
    for f in rejected:
        if f in explicit:
            return 1
    return 0
773
780
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for fname, st in walkresults.iteritems():
        state = dirstate[fname]
        if state == '?' and audit_path.check(fname):
            unknown.append(fname)
        elif state != 'r' and not st:
            deleted.append(fname)
        elif state == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(fname)
        # for finding renames
        elif state == 'r' and not st:
            removed.append(fname)
        elif state == 'a':
            added.append(fname)

    return added, unknown, deleted, removed, forgotten
802
809
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity > 0:
        for src, dst, score in similar.findrenames(repo, added, removed,
                                                   similarity):
            if (repo.ui.verbose or not matcher.exact(src)
                or not matcher.exact(dst)):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (matcher.rel(src), matcher.rel(dst),
                                score * 100))
            renames[dst] = src
    return renames
817
824
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for dst, src in renames.iteritems():
            wctx.copy(src, dst)
    finally:
        # always release the working-copy lock, even if marking failed
        wlock.release()
830
837
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
        return

    if repo.dirstate[origsrc] == 'a' and origsrc == src:
        # source itself is only scheduled for add: no copy data to record
        if not ui.quiet:
            ui.warn(_("%s has not been committed yet, so no copy "
                      "data will be stored for %s.\n")
                    % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
        if repo.dirstate[dst] in '?r' and not dryrun:
            wctx.add([dst])
    elif not dryrun:
        wctx.copy(origsrc, dst)
849
856
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for requirement in requirements:
        if requirement in supported:
            continue
        # entries must start with an alphanumeric character; anything
        # else indicates a damaged requires file
        if not requirement or not requirement[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(requirement)
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(sorted(missings)),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
868
875
class filecachesubentry(object):
    '''Tracks the stat data of one file so callers can ask "has it changed?".

    Fix: the stat() helper used the Python-2-only ``except OSError, e:``
    comma syntax; replaced with ``except OSError as e:``, which is valid on
    Python 2.6+ and required on Python 3.
    '''
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        '''Re-stat the file, but only when stat data is trustworthy.'''
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        '''True if stat data can be used to detect changes to this file.'''
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        '''True if the file appears to have changed since the last stat.'''
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        '''Return util.cachestat for path, or None if the file is missing.'''
        try:
            return util.cachestat(path)
        except OSError as e:
            # a missing file just means "no stat data"; anything else
            # is a real error
            if e.errno != errno.ENOENT:
                raise
923
930
class filecacheentry(object):
    '''Aggregates filecachesubentry objects for a group of paths.'''
    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
940
947
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates the
    object when any of the files changes, updating the new stat info in
    _filecache.

    Mercurial either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fallback to
    recreating the object on every call (essentially the same behaviour as
    propertycache).

    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator application: remember the wrapped function and its name
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        cached = obj._filecache.get(self.name)

        if cached:
            if cached.changed():
                cached.obj = self.func(obj)
        else:
            fullpaths = [self.join(obj, p) for p in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            cached = filecacheentry(fullpaths, True)
            cached.obj = self.func(obj)

            obj._filecache[self.name] = cached

        obj.__dict__[self.name] = cached.obj
        return cached.obj

    def __set__(self, obj, value):
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            fullpaths = [self.join(obj, p) for p in self.paths]
            entry = filecacheentry(fullpaths, False)
            obj._filecache[self.name] = entry
        else:
            entry = obj._filecache[self.name]

        entry.obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError(self.name)
1016
1023
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if util.safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: honor the state byte to skip
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                counts[base] += 1
                # once a prefix exists, all its ancestors are counted too
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                counts[base] -= 1
                # ancestors still referenced by the remaining count
                return
            del counts[base]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs

# prefer the C implementation when the parsers module provides one
if util.safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
1055
1062
def finddirs(path):
    '''Yield each ancestor directory of path, deepest first.'''
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
General Comments 0
You need to be logged in to leave comments. Login now