##// END OF EJS Templates
vfs: add basename
FUJIWARA Katsunori -
r25770:39de2e9c default
parent child Browse files
Show More
@@ -1,1156 +1,1162
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev, wdirrev
9 from mercurial.node import nullrev, wdirrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import pathutil
11 import pathutil
12 import match as matchmod
12 import match as matchmod
13 import os, errno, re, glob, tempfile, shutil, stat
13 import os, errno, re, glob, tempfile, shutil, stat
14
14
# Select the platform-specific scm module at import time; both modules
# expose the same rc-path lookup interface.
if os.name == 'nt':
    import scmwindows as scmplatform
else:
    import scmposix as scmplatform

# Re-export the platform hooks so callers can use scmutil.systemrcpath()
# and scmutil.userrcpath() without caring which module backs them.
systemrcpath = scmplatform.systemrcpath
userrcpath = scmplatform.userrcpath
22
22
class status(tuple):
    """Tuple subclass holding one list of files per dirstate status.

    The 'deleted', 'unknown' and 'ignored' properties are only
    meaningful for the working copy.
    """

    # no per-instance __dict__; all state lives in the tuple itself
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        fields = (modified, added, removed, deleted, unknown, ignored,
                  clean)
        return tuple.__new__(cls, fields)

    @property
    def modified(self):
        """files whose content has been changed"""
        return self[0]

    @property
    def added(self):
        """files newly added to tracking"""
        return self[1]

    @property
    def removed(self):
        """files removed from tracking"""
        return self[2]

    @property
    def deleted(self):
        """tracked files missing from the working copy (aka "missing")"""
        return self[3]

    @property
    def unknown(self):
        """untracked files that are not ignored"""
        return self[4]

    @property
    def ignored(self):
        """untracked files matched by the ignore rules (_dirignore())"""
        return self[5]

    @property
    def clean(self):
        """files with no modifications"""
        return self[6]

    def __repr__(self, *args, **kwargs):
        template = ('<status modified=%r, added=%r, removed=%r, deleted=%r, '
                    'unknown=%r, ignored=%r, clean=%r>')
        return template % self
75
75
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2

    Yields (subpath, subrepo) pairs: first every subrepo known to either
    context (sorted by subpath), then a null subrepo for each path that
    exists only in ctx2.
    """
    # Create a (subpath, ctx) mapping where we prefer subpaths from
    # ctx1. The subpaths from ctx2 are important when the .hgsub file
    # has been modified (in ctx2) but not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    missing = set()

    # paths present only in ctx2 are removed from the mapping and
    # remembered for the null-subrepo pass below
    for subpath in ctx2.substate:
        if subpath not in ctx1.substate:
            del subpaths[subpath]
            missing.add(subpath)

    # sorted() gives deterministic iteration order
    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way,
    # status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
100
100
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    secretlist = []
    if excluded:
        for n in excluded:
            if n not in repo:
                # discovery should not have included the filtered revision,
                # we have to explicitly exclude it until discovery is cleanup.
                continue
            ctx = repo[n]
            # count secret (and non-extinct) changesets so the message
            # below can hint at why nothing was exchanged
            if ctx.phase() >= phases.secret and not ctx.extinct():
                secretlist.append(n)

    if secretlist:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
    else:
        ui.status(_("no changes found\n"))
121
121
def checknewlabel(repo, lbl, kind):
    """Abort if ``lbl`` is not acceptable as a new label name.

    The "kind" parameter is deliberately not used in ui output, since
    that would make the strings difficult to translate.
    """
    reserved = ['tip', '.', 'null']
    if lbl in reserved:
        raise util.Abort(_("the name '%s' is reserved") % lbl)
    forbidden = (':', '\0', '\n', '\r')
    for ch in forbidden:
        if ch in lbl:
            raise util.Abort(_("%r cannot be used in a name") % ch)
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # a purely numeric label would be ambiguous with a revision number
        raise util.Abort(_("cannot use an integer as a name"))
135
135
def checkfilename(f):
    """Abort if filename ``f`` contains a character never acceptable in
    tracked files (newline or carriage return)."""
    for badchar in ('\r', '\n'):
        if badchar in f:
            raise util.Abort(
                _("'\\n' and '\\r' disallowed in filenames: %r") % f)
140
140
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    # hard requirement first: no \n or \r, regardless of config
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if abort or warn:
        msg = util.checkwinfilename(f)
        if msg:
            msg = "%s: %r" % (msg, f)
            if abort:
                raise util.Abort(msg)
            # warn-only mode: report the problem but keep going
            ui.warn(_("warning: %s\n") % msg)
152
152
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames

    Returns an (abort, warn) pair of booleans.  Raises ConfigError when
    ui.portablefilenames holds an unrecognized value.
    '''
    val = ui.config('ui', 'portablefilenames', 'warn')
    lval = val.lower()
    bval = util.parsebool(val)
    # on Windows ('nt') we always abort, whatever the config says
    abort = os.name == 'nt' or lval == 'abort'
    warn = bval or lval == 'warn'
    if bval is None and not (warn or abort or lval == 'ignore'):
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % val)
    return abort, warn
165
165
class casecollisionauditor(object):
    """Warn or abort when a new filename case-folds to an existing one.

    Instances are callable; call with each candidate filename to check.
    """
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # lower-case every tracked name in a single encoding.lower()
        # call by joining on NUL — presumably cheaper than one call per
        # file; NUL cannot appear in tracked names (see checknewlabel)
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # a collision is a new name whose folded form is already known
        # while the exact name is not tracked
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise util.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        # remember this name so later calls compare against it too
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
189
189
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view (up to
    ``maxrev``) and returns that SHA-1 digest, or None when nothing is
    filtered.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    s = util.sha1()
    for rev in revs:
        s.update('%s;' % rev)
    return s.digest()
213
213
class abstractvfs(object):
    """Abstract base class; cannot be instantiated

    Concrete subclasses supply ``__call__`` (open a file) and ``join``
    (resolve a relative path); every helper here is written in terms of
    those two operations.
    """

    def __init__(self, *args, **kwargs):
        '''Prevent instantiation; don't call this from subclasses.'''
        raise NotImplementedError('attempted instantiating ' + str(type(self)))

    def tryread(self, path):
        '''gracefully return an empty string for missing files'''
        try:
            return self.read(path)
        except IOError as inst:
            # only ENOENT (missing file) is forgiven; other I/O errors
            # still propagate
            if inst.errno != errno.ENOENT:
                raise
        return ""

    def tryreadlines(self, path, mode='rb'):
        '''gracefully return an empty array for missing files'''
        try:
            return self.readlines(path, mode=mode)
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
        return []

    def open(self, path, mode="r", text=False, atomictemp=False,
             notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        # rebind the *instance* attribute so later open() calls go
        # straight to __call__ without re-entering this wrapper
        self.open = self.__call__
        return self.__call__(path, mode, text, atomictemp, notindexed)

    def read(self, path):
        '''return the whole binary content of ``path``'''
        fp = self(path, 'rb')
        try:
            return fp.read()
        finally:
            fp.close()

    def readlines(self, path, mode='rb'):
        '''return the list of lines of ``path``'''
        fp = self(path, mode=mode)
        try:
            return fp.readlines()
        finally:
            fp.close()

    def write(self, path, data):
        '''overwrite ``path`` with ``data`` in binary mode'''
        fp = self(path, 'wb')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def writelines(self, path, data, mode='wb', notindexed=False):
        '''write the sequence of lines ``data`` to ``path``'''
        fp = self(path, mode=mode, notindexed=notindexed)
        try:
            return fp.writelines(data)
        finally:
            fp.close()

    def append(self, path, data):
        '''append ``data`` at the end of ``path`` in binary mode'''
        fp = self(path, 'ab')
        try:
            return fp.write(data)
        finally:
            fp.close()

    def basename(self, path):
        """return base element of a path (as os.path.basename would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.basename(path)

    def chmod(self, path, mode):
        return os.chmod(self.join(path), mode)

    def exists(self, path=None):
        return os.path.exists(self.join(path))

    def fstat(self, fp):
        return util.fstat(fp)

    def isdir(self, path=None):
        return os.path.isdir(self.join(path))

    def isfile(self, path=None):
        return os.path.isfile(self.join(path))

    def islink(self, path=None):
        return os.path.islink(self.join(path))

    def reljoin(self, *paths):
        """join various elements of a path together (as os.path.join would do)

        The vfs base is not injected so that path stay relative. This exists
        to allow handling of strange encoding if needed."""
        return os.path.join(*paths)

    def split(self, path):
        """split top-most element of a path (as os.path.split would do)

        This exists to allow handling of strange encoding if needed."""
        return os.path.split(path)

    def lexists(self, path=None):
        return os.path.lexists(self.join(path))

    def lstat(self, path=None):
        return os.lstat(self.join(path))

    def listdir(self, path=None):
        return os.listdir(self.join(path))

    def makedir(self, path=None, notindexed=True):
        return util.makedir(self.join(path), notindexed)

    def makedirs(self, path=None, mode=None):
        return util.makedirs(self.join(path), mode)

    def makelock(self, info, path):
        return util.makelock(info, self.join(path))

    def mkdir(self, path=None):
        return os.mkdir(self.join(path))

    def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
        '''create a temporary file inside the vfs and return (fd, name)

        The returned name is relative to ``dir`` when one was given,
        otherwise it is the bare file name.
        '''
        fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
                                    dir=self.join(dir), text=text)
        # tempfile returned an absolute name; strip it back down to a
        # vfs-relative one
        dname, fname = util.split(name)
        if dir:
            return fd, os.path.join(dir, fname)
        else:
            return fd, fname

    def readdir(self, path=None, stat=None, skip=None):
        return osutil.listdir(self.join(path), stat, skip)

    def readlock(self, path):
        return util.readlock(self.join(path))

    def rename(self, src, dst):
        return util.rename(self.join(src), self.join(dst))

    def readlink(self, path):
        return os.readlink(self.join(path))

    def removedirs(self, path=None):
        """Remove a leaf directory and all empty intermediate ones
        """
        return util.removedirs(self.join(path))

    def rmtree(self, path=None, ignore_errors=False, forcibly=False):
        """Remove a directory tree recursively

        If ``forcibly``, this tries to remove READ-ONLY files, too.
        """
        if forcibly:
            def onerror(function, path, excinfo):
                # only retry failed os.remove calls; other failures
                # (e.g. rmdir) are re-raised as-is
                if function is not os.remove:
                    raise
                # read-only files cannot be unlinked under Windows
                s = os.stat(path)
                if (s.st_mode & stat.S_IWRITE) != 0:
                    raise
                os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
                os.remove(path)
        else:
            onerror = None
        return shutil.rmtree(self.join(path),
                             ignore_errors=ignore_errors, onerror=onerror)

    def setflags(self, path, l, x):
        return util.setflags(self.join(path), l, x)

    def stat(self, path=None):
        return os.stat(self.join(path))

    def unlink(self, path=None):
        return util.unlink(self.join(path))

    def unlinkpath(self, path=None, ignoremissing=False):
        return util.unlinkpath(self.join(path), ignoremissing)

    def utime(self, path=None, t=None):
        return os.utime(self.join(path), t)

    def walk(self, path=None, onerror=None):
        """Yield (dirpath, dirs, files) tuple for each directories under path

        ``dirpath`` is relative one from the root of this vfs. This
        uses ``os.sep`` as path separator, even you specify POSIX
        style ``path``.

        "The root of this vfs" is represented as empty ``dirpath``.
        """
        root = os.path.normpath(self.join(None))
        # when dirpath == root, dirpath[prefixlen:] becomes empty
        # because len(dirpath) < prefixlen.
        prefixlen = len(pathutil.normasprefix(root))
        for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror):
            yield (dirpath[prefixlen:], dirs, files)
413
419
class vfs(abstractvfs):
    '''Operate files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    '''
    def __init__(self, base, audit=True, expandpath=False, realpath=False):
        # ``expandpath``: expand user/environment references in ``base``;
        # ``realpath``: resolve symlinks in ``base``
        if expandpath:
            base = util.expandpath(base)
        if realpath:
            base = os.path.realpath(base)
        self.base = base
        self._setmustaudit(audit)
        self.createmode = None
        self._trustnlink = None

    def _getmustaudit(self):
        return self._audit

    def _setmustaudit(self, onoff):
        # swap between a real path auditor and an always-true stub
        self._audit = onoff
        if onoff:
            self.audit = pathutil.pathauditor(self.base)
        else:
            self.audit = util.always

    mustaudit = property(_getmustaudit, _setmustaudit)

    @util.propertycache
    def _cansymlink(self):
        # computed once per instance: can the base dir host symlinks?
        return util.checklink(self.base)

    @util.propertycache
    def _chmod(self):
        # computed once per instance: does the base dir honor exec bits?
        return util.checkexec(self.base)

    def _fixfilemode(self, name):
        # apply createmode (minus any exec-ish bits beyond 0666) to a
        # freshly created file, when configured and supported
        if self.createmode is None or not self._chmod:
            return
        os.chmod(name, self.createmode & 0o666)

    def __call__(self, path, mode="r", text=False, atomictemp=False,
                 notindexed=False):
        '''Open ``path`` file, which is relative to vfs root.

        Newly created directories are marked as "not to be indexed by
        the content indexing service", if ``notindexed`` is specified
        for "write" mode access.
        '''
        if self._audit:
            r = util.checkosfilename(path)
            if r:
                raise util.Abort("%s: %r" % (r, path))
            self.audit(path)
        f = self.join(path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        # nlink: -1 unknown, 0 newly created, >0 existing link count
        nlink = -1
        if mode not in ('r', 'rb'):
            dirname, basename = util.split(f)
            # If basename is empty, then the path is malformed because it points
            # to a directory. Let the posixfile() call below raise IOError.
            if basename:
                if atomictemp:
                    util.ensuredirs(dirname, self.createmode, notindexed)
                    return util.atomictempfile(f, mode, self.createmode)
                try:
                    if 'w' in mode:
                        util.unlink(f)
                        nlink = 0
                    else:
                        # nlinks() may behave differently for files on Windows
                        # shares if the file is open.
                        fd = util.posixfile(f)
                        nlink = util.nlinks(f)
                        if nlink < 1:
                            nlink = 2 # force mktempcopy (issue1922)
                        fd.close()
                except (OSError, IOError) as e:
                    if e.errno != errno.ENOENT:
                        raise
                    nlink = 0
                util.ensuredirs(dirname, self.createmode, notindexed)
                if nlink > 0:
                    if self._trustnlink is None:
                        self._trustnlink = nlink > 1 or util.checknlink(f)
                    # break hardlinks (copy-on-write) before modifying
                    if nlink > 1 or not self._trustnlink:
                        util.rename(util.mktempcopy(f), f)
        fp = util.posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        '''create a symlink at ``dst`` pointing to ``src``; falls back to
        writing ``src`` as the file content when symlinks are unsupported'''
        self.audit(dst)
        linkname = self.join(dst)
        try:
            os.unlink(linkname)
        except OSError:
            # best effort: the link may simply not exist yet
            pass

        util.ensuredirs(os.path.dirname(linkname), self.createmode)

        if self._cansymlink:
            try:
                os.symlink(src, linkname)
            except OSError as err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            self.write(dst, src)

    def join(self, path, *insidef):
        # empty/None path resolves to the vfs base itself
        if path:
            return os.path.join(self.base, path, *insidef)
        else:
            return self.base
533
539
# compatibility alias: this class was historically named 'opener'
opener = vfs
535
541
class auditvfs(object):
    """Base class for vfs wrappers around another vfs instance.

    Stores the wrapped vfs and forwards the 'mustaudit' flag to it in
    both directions.
    """

    def __init__(self, vfs):
        self.vfs = vfs

    @property
    def mustaudit(self):
        return self.vfs.mustaudit

    @mustaudit.setter
    def mustaudit(self, onoff):
        self.vfs.mustaudit = onoff
547
553
class filtervfs(abstractvfs, auditvfs):
    '''Wrapper vfs for filtering filenames with a function.'''

    def __init__(self, vfs, filter):
        auditvfs.__init__(self, vfs)
        self._filter = filter

    def __call__(self, path, *args, **kwargs):
        # open through the wrapped vfs, after rewriting the path
        return self.vfs(self._filter(path), *args, **kwargs)

    def join(self, path, *insidef):
        if not path:
            return self.vfs.join(path)
        filtered = self._filter(self.vfs.reljoin(path, *insidef))
        return self.vfs.join(filtered)
563
569
# compatibility alias, matching the 'opener = vfs' convention above
filteropener = filtervfs
565
571
class readonlyvfs(abstractvfs, auditvfs):
    '''Wrapper vfs preventing any writing.'''

    def __init__(self, vfs):
        auditvfs.__init__(self, vfs)

    def __call__(self, path, mode='r', *args, **kw):
        # refuse anything but plain read modes before delegating
        if mode not in ('r', 'rb'):
            raise util.Abort('this vfs is read only')
        return self.vfs(path, mode, *args, **kw)
576
582
577
583
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # only propagate walk errors for the root path itself; errors on
        # subdirectories are silently skipped
        if err.filename == path:
            raise err
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # record dirname's stat in dirlst; return True iff it was new
            # (used to break symlink cycles)
            match = False
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        # platform cannot compare stat identity; disable symlink following
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        # walk through the symlink, sharing seen_dirs so
                        # cycles are detected across the recursion
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
625
631
def osrcpath():
    '''return default os-specific hgrc search path'''
    defaultpath = os.path.join(util.datapath, 'default.d')
    path = []
    # bundled defaults first, then system-wide, then per-user
    if os.path.isdir(defaultpath):
        path.extend(os.path.join(defaultpath, f)
                    for f, kind in osutil.listdir(defaultpath)
                    if f.endswith('.rc'))
    path.extend(systemrcpath())
    path.extend(userrcpath())
    return [os.path.normpath(f) for f in path]
638
644
# memoized result of rcpath(); None until first computed
_rcpath = None
640
646
def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is not None:
        # already computed once; the result is cached for the process
        return _rcpath
    if 'HGRCPATH' not in os.environ:
        _rcpath = osrcpath()
        return _rcpath
    paths = []
    for entry in os.environ['HGRCPATH'].split(os.pathsep):
        if not entry:
            continue
        entry = util.expandpath(entry)
        if os.path.isdir(entry):
            # a directory contributes every *.rc file it contains
            paths.extend(os.path.join(entry, f)
                         for f, kind in osutil.listdir(entry)
                         if f.endswith('.rc'))
        else:
            paths.append(entry)
    _rcpath = paths
    return _rcpath
664
670
def intrev(rev):
    """Return integer for a given revision that can be used in comparison or
    arithmetic operation"""
    # None denotes the working directory; map it to its sentinel revnum
    return wdirrev if rev is None else rev
671
677
def revsingle(repo, revspec, default='.'):
    """Resolve one revision spec to a changectx, aborting on an empty set."""
    # an empty spec (but not the literal revision 0) falls back to default
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec])
    if not matched:
        raise util.Abort(_('empty revision set'))
    return repo[matched.last()]
680
686
def revpair(repo, revs):
    """Resolve user revision specs to a (first, second) pair of nodes.

    Returns (p1, None) for an empty spec list.  second is None when the
    request collapses to a single revision, so callers can tell "one rev"
    apart from "range that happens to contain one rev".
    """
    if not revs:
        return repo.dirstate.p1(), None

    l = revrange(repo, revs)

    # pick cheap endpoints when the smartset knows its ordering
    if not l:
        first = second = None
    elif l.isascending():
        first = l.min()
        second = l.max()
    elif l.isdescending():
        first = l.max()
        second = l.min()
    else:
        # unordered set: fall back to iteration order
        first = l.first()
        second = l.last()

    if first is None:
        raise util.Abort(_('empty revision range'))

    # a single spec with no range separator that resolved to one revision
    # is reported as a lone revision (second is None)
    if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
        return repo.lookup(first), None

    return repo.lookup(first), repo.lookup(second)
706
712
# separator used by old-style "a:b" revision ranges
_revrangesep = ':'
708
714
def revrange(repo, revs):
    """Yield revision as strings from a list of revision specifications."""

    def revfix(repo, val, defval):
        # map an empty spec (but not revision 0) to the supplied default
        if not val and val != 0 and defval is not None:
            return defval
        return repo[val].rev()

    subsets = []

    # NOTE(review): the comprehension below rebinds '_' (the gettext alias)
    # inside this function under Python 2 scoping; harmless here since '_'
    # is not used later in this function
    revsetaliases = [alias for (alias, _) in
                     repo.ui.configitems("revsetalias")]

    for spec in revs:
        # attempt to parse old-style ranges first to deal with
        # things like old-tag which contain query metacharacters
        try:
            # ... except for revset aliases without arguments. These
            # should be parsed as soon as possible, because they might
            # clash with a hash prefix.
            if spec in revsetaliases:
                raise error.RepoLookupError

            if isinstance(spec, int):
                subsets.append(revset.baseset([spec]))
                continue

            if _revrangesep in spec:
                start, end = spec.split(_revrangesep, 1)
                # either endpoint being an alias forces revset parsing
                if start in revsetaliases or end in revsetaliases:
                    raise error.RepoLookupError

                start = revfix(repo, start, 0)
                end = revfix(repo, end, len(repo) - 1)
                if end == nullrev and start < 0:
                    start = nullrev
                if start < end:
                    l = revset.spanset(repo, start, end + 1)
                else:
                    # descending range; spanset's second bound is exclusive
                    l = revset.spanset(repo, start, end - 1)
                subsets.append(l)
                continue
            elif spec and spec in repo: # single unquoted rev
                rev = revfix(repo, spec, None)
                subsets.append(revset.baseset([rev]))
                continue
        except error.RepoLookupError:
            pass

        # fall through to new-style queries if old-style fails
        m = revset.match(repo.ui, spec, repo)
        subsets.append(m(repo))

    return revset._combinesets(subsets)
763
769
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is not None:
            # explicit pattern kind ('glob:', 're:', ...): keep verbatim
            ret.append(kindpat)
            continue
        try:
            globbed = glob.glob(pat)
        except re.error:
            globbed = [pat]
        if globbed:
            ret.extend(globbed)
        else:
            # no match on disk: keep the original pattern untouched
            ret.append(kindpat)
    return ret
782
788
def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    # NOTE(review): the mutable defaults ([], {}) are never mutated here,
    # but remain a hazard if this function is ever changed to modify them
    if pats == ("",):
        pats = []
    if not globbed and default == 'relpath':
        pats = expandpats(pats or [])

    def bad(f, msg):
        # 'm' is the matcher created below; late binding is intentional,
        # as the closure only runs once the matcher exists
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # matcher matches everything: report "no patterns" to the caller
        pats = []
    return m, pats
805
811
def match(ctx, pats=None, opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.

    Thin wrapper around matchandpats() that discards the returned
    patterns.  Accepts the same arguments; pats/opts default to an empty
    list/dict.
    '''
    # use None sentinels instead of mutable default arguments ([], {}) to
    # avoid the shared-default hazard; behavior for existing callers is
    # unchanged
    if pats is None:
        pats = []
    if opts is None:
        opts = {}
    return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0]
809
815
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
813
819
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
817
823
def addremove(repo, matcher, prefix, opts={}, dry_run=None, similarity=None):
    """Add new files and forget missing ones, recursing into subrepos.

    Returns 0 on success; 1 if a subrepo addremove failed or an explicitly
    named file was rejected by the walk.
    """
    m = matcher
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    def matchessubrepo(matcher, subpath):
        # a subrepo is matched if named exactly or if any listed file
        # lies inside it
        if matcher.exact(subpath):
            return True
        for f in matcher.files():
            if f.startswith(subpath):
                return True
        return False

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        if opts.get('subrepos') or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.addremove(submatch, prefix, opts, dry_run, similarity):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # warn only about explicitly listed files, but record every
        # rejection for the final exit-status check
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
            else:
                status = _('removing %s\n') % m.uipath(abs)
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    for f in rejected:
        if f in m.files():
            return 1
    return ret
879
885
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # the lambda references 'rejected' before the list is assigned below;
    # this works because the closure is only invoked during the dirstate
    # walk, after the assignment (late binding)
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))
    rejected = []

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # exit status 1 if any explicitly listed file was rejected
    for f in rejected:
        if f in m.files():
            return 1
    return 0
908
914
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    audit_path = pathutil.pathauditor(repo.root)

    ctx = repo[None]
    dirstate = repo.dirstate
    walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
                                full=False)
    for abs, st in walkresults.iteritems():
        # dirstate state chars: '?' untracked, 'r' removed, 'a' added;
        # st is falsy when the file is absent on disk (per the tests below)
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            # marked removed but present on disk again
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
937
943
def _findrenames(repo, matcher, added, removed, similarity):
    '''Find renames from removed files to added ones.'''
    renames = {}
    if similarity <= 0:
        return renames
    for old, new, score in similar.findrenames(repo, added, removed,
                                               similarity):
        if (repo.ui.verbose or not matcher.exact(old)
            or not matcher.exact(new)):
            repo.ui.status(_('recording removal of %s as rename to %s '
                             '(%d%% similar)\n') %
                           (matcher.rel(old), matcher.rel(new),
                            score * 100))
        renames[new] = old
    return renames
952
958
def _markchanges(repo, unknown, deleted, renames):
    '''Marks the files in unknown as added, the files in deleted as removed,
    and the files in renames as copied.'''
    wctx = repo[None]
    # take the working-copy lock for the whole batch of dirstate updates
    wlock = repo.wlock()
    try:
        wctx.forget(deleted)
        wctx.add(unknown)
        for new, old in renames.iteritems():
            wctx.copy(old, new)
    finally:
        wlock.release()
965
971
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow an existing copy chain back to its origin
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source is only added in the working copy: there is no
            # committed revision to record copy data against
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
984
990
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r in supported:
            continue
        # a blank or non-alphanumeric-leading entry means the file itself
        # is damaged rather than merely unsupported
        if not r or not r[0].isalnum():
            raise error.RequirementError(_(".hg/requires file is corrupt"))
        missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("repository requires features unknown to this Mercurial: %s")
            % " ".join(missings),
            hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
                   " for more information"))
    return requirements
1003
1009
def writerequires(opener, requirements):
    """Write the requirements, one per line in sorted order, via opener."""
    lines = ["%s\n" % r for r in sorted(requirements)]
    reqfile = opener("requires", "w")
    for line in lines:
        reqfile.write(line)
    reqfile.close()
1009
1015
class filecachesubentry(object):
    """Stat-based change tracker for a single path.

    cachestat holds the last recorded stat info (None when the file was
    missing or stat collection was skipped); _cacheable remembers whether
    the filesystem can reliably report file replacement (None = unknown).
    """
    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)

            if self.cachestat:
                self._cacheable = self.cachestat.cacheable()
            else:
                # None means we don't know yet
                self._cacheable = None

    def refresh(self):
        # re-record stat info, but only if caching can work at all
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        """Return True if the file changed (or change cannot be detected)."""
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        # returns util.cachestat(path), or None when the file is missing;
        # any other OS error propagates
        try:
            return util.cachestat(path)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
1064
1070
class filecacheentry(object):
    """Aggregate change tracker over several paths.

    Holds one filecachesubentry per path and reports a change as soon as
    any of them does.
    """

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(path, stat) for path in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(entry.changed() for entry in self._entries)

    def refresh(self):
        for entry in self._entries:
            entry.refresh()
1081
1087
class filecache(object):
    '''A property like decorator that tracks files under .hg/ for updates.

    The stat info observed when the decorated function runs is recorded
    in the owner's _filecache dict.  Later accesses compare fresh stat
    info against the recorded one and recreate the object whenever any
    of the tracked files changed, refreshing the recorded stat info.

    Mercurial either atomic renames or appends for files under .hg, so
    to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced.  If it can't, we fall back
    to recreating the object on every call (essentially the same
    behaviour as propertycache).
    '''
    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Subclasses should provide their own version of this function to
        call the appropriate join function on 'obj' (an instance of the
        class whose member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        # decorator application: remember the wrapped function and the
        # attribute name it will live under
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # already materialized on the instance? serve that copy
        if self.name in obj.__dict__:
            assert self.name in obj._filecache, self.name
            return obj.__dict__[self.name]

        ce = obj._filecache.get(self.name)

        if ce:
            # rebuild only when a tracked file actually changed
            if ce.changed():
                ce.obj = self.func(obj)
        else:
            fullpaths = [self.join(obj, p) for p in self.paths]

            # We stat -before- creating the object so our cache doesn't
            # lie if a writer modified between the time we read and stat
            ce = filecacheentry(fullpaths, True)
            ce.obj = self.func(obj)

        obj._filecache[self.name] = ce

        obj.__dict__[self.name] = ce.obj
        return ce.obj

    def __set__(self, obj, value):
        try:
            ce = obj._filecache[self.name]
        except KeyError:
            # register a placeholder entry for the missing value because
            # X in __dict__ must imply X in _filecache
            fullpaths = [self.join(obj, p) for p in self.paths]
            ce = filecacheentry(fullpaths, False)
            obj._filecache[self.name] = ce
        ce.obj = value  # update cached copy
        obj.__dict__[self.name] = value  # update copy returned by obj.x

    def __delete__(self, obj):
        if self.name not in obj.__dict__:
            raise AttributeError(self.name)
        del obj.__dict__[self.name]
General Comments 0
You need to be logged in to leave comments. Login now