match: improve documentation - docstrings and more descriptive variable naming...
Mads Kiilerich
r21111:9d28fd79 default
diff --git a/mercurial/match.py b/mercurial/match.py
@@ -1,367 +1,397 @@
1 1 # match.py - filename matching
2 2 #
3 3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import re
9 9 import util, pathutil
10 10 from i18n import _
11 11
12 def _rematcher(pat):
13 m = util.compilere(pat)
12 def _rematcher(regex):
13 '''compile the regexp with the best available regexp engine and return a
14 matcher function'''
15 m = util.compilere(regex)
14 16 try:
15 17 # slightly faster, provided by facebook's re2 bindings
16 18 return m.test_match
17 19 except AttributeError:
18 20 return m.match
19 21
20 def _expandsets(pats, ctx):
21 '''convert set: patterns into a list of files in the given context'''
22 def _expandsets(kindpats, ctx):
23 '''Returns the kindpats list with the 'set' patterns expanded.'''
22 24 fset = set()
23 25 other = []
24 26
25 for kind, expr in pats:
27 for kind, pat in kindpats:
26 28 if kind == 'set':
27 29 if not ctx:
28 30 raise util.Abort("fileset expression with no context")
29 s = ctx.getfileset(expr)
31 s = ctx.getfileset(pat)
30 32 fset.update(s)
31 33 continue
32 other.append((kind, expr))
34 other.append((kind, pat))
33 35 return fset, other
34 36
35 37 class match(object):
36 38 def __init__(self, root, cwd, patterns, include=[], exclude=[],
37 39 default='glob', exact=False, auditor=None, ctx=None):
38 40 """build an object to match a set of file patterns
39 41
40 42 arguments:
41 43 root - the canonical root of the tree you're matching against
42 44 cwd - the current working directory, if relevant
43 45 patterns - patterns to find
44 include - patterns to include
45 exclude - patterns to exclude
46 default - if a pattern in names has no explicit type, assume this one
47 exact - patterns are actually literals
46 include - patterns to include (unless they are excluded)
47 exclude - patterns to exclude (even if they are included)
48 default - if a pattern in patterns has no explicit type, assume this one
49 exact - patterns are actually filenames (include/exclude still apply)
48 50
49 51 a pattern is one of:
50 52 'glob:<glob>' - a glob relative to cwd
51 53 're:<regexp>' - a regular expression
52 54 'path:<path>' - a path relative to repository root
53 55 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
54 56 'relpath:<path>' - a path relative to cwd
55 57 'relre:<regexp>' - a regexp that needn't match the start of a name
56 58 'set:<fileset>' - a fileset expression
57 59 '<something>' - a pattern of the specified default type
58 60 """
59 61
60 62 self._root = root
61 63 self._cwd = cwd
62 64 self._files = [] # exact files and roots of patterns
63 65 self._anypats = bool(include or exclude)
64 66 self._ctx = ctx
65 67 self._always = False
66 68
67 69 if include:
68 pats = _normalize(include, 'glob', root, cwd, auditor)
69 self.includepat, im = _buildmatch(ctx, pats, '(?:/|$)')
70 kindpats = _normalize(include, 'glob', root, cwd, auditor)
71 self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)')
70 72 if exclude:
71 pats = _normalize(exclude, 'glob', root, cwd, auditor)
72 self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
73 kindpats = _normalize(exclude, 'glob', root, cwd, auditor)
74 self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)')
73 75 if exact:
74 76 if isinstance(patterns, list):
75 77 self._files = patterns
76 78 else:
77 79 self._files = list(patterns)
78 80 pm = self.exact
79 81 elif patterns:
80 pats = _normalize(patterns, default, root, cwd, auditor)
81 self._files = _roots(pats)
82 self._anypats = self._anypats or _anypats(pats)
83 self.patternspat, pm = _buildmatch(ctx, pats, '$')
82 kindpats = _normalize(patterns, default, root, cwd, auditor)
83 self._files = _roots(kindpats)
84 self._anypats = self._anypats or _anypats(kindpats)
85 self.patternspat, pm = _buildmatch(ctx, kindpats, '$')
84 86
85 87 if patterns or exact:
86 88 if include:
87 89 if exclude:
88 90 m = lambda f: im(f) and not em(f) and pm(f)
89 91 else:
90 92 m = lambda f: im(f) and pm(f)
91 93 else:
92 94 if exclude:
93 95 m = lambda f: not em(f) and pm(f)
94 96 else:
95 97 m = pm
96 98 else:
97 99 if include:
98 100 if exclude:
99 101 m = lambda f: im(f) and not em(f)
100 102 else:
101 103 m = im
102 104 else:
103 105 if exclude:
104 106 m = lambda f: not em(f)
105 107 else:
106 108 m = lambda f: True
107 109 self._always = True
108 110
109 111 self.matchfn = m
110 112 self._fmap = set(self._files)
111 113
112 114 def __call__(self, fn):
113 115 return self.matchfn(fn)
114 116 def __iter__(self):
115 117 for f in self._files:
116 118 yield f
119
120 # Callbacks related to how the matcher is used by dirstate.walk.
121 # Subscribers to these events must monkeypatch the matcher object.
117 122 def bad(self, f, msg):
118 '''callback for each explicit file that can't be
119 found/accessed, with an error message
120 '''
123 '''Callback from dirstate.walk for each explicit file that can't be
124 found/accessed, with an error message.'''
121 125 pass
122 # If this is set, it will be called when an explicitly listed directory is
123 # visited.
126
127 # If an explicitdir is set, it will be called when an explicitly listed
128 # directory is visited.
124 129 explicitdir = None
125 # If this is set, it will be called when a directory discovered by recursive
126 # traversal is visited.
130
131 # If a traversedir is set, it will be called when a directory discovered
132 # by recursive traversal is visited.
127 133 traversedir = None
134
128 135 def missing(self, f):
129 136 pass
130 def exact(self, f):
131 return f in self._fmap
137
132 138 def rel(self, f):
139 '''Convert repo path back to path that is relative to cwd of matcher.'''
133 140 return util.pathto(self._root, self._cwd, f)
141
134 142 def files(self):
143 '''Explicitly listed files or patterns or roots:
144 if no patterns or .always(): empty list,
145 if exact: list exact files,
146 if not .anypats(): list all files and dirs,
147 else: optimal roots'''
135 148 return self._files
149
150 def exact(self, f):
151 '''Returns True if f is in .files().'''
152 return f in self._fmap
153
136 154 def anypats(self):
155 '''Matcher uses patterns or include/exclude.'''
137 156 return self._anypats
157
138 158 def always(self):
159 '''Matcher will match everything and .files() will be empty
160 - optimization might be possible and necessary.'''
139 161 return self._always
140 162
141 163 class exact(match):
142 164 def __init__(self, root, cwd, files):
143 165 match.__init__(self, root, cwd, files, exact=True)
144 166
145 167 class always(match):
146 168 def __init__(self, root, cwd):
147 169 match.__init__(self, root, cwd, [])
148 170 self._always = True
149 171
150 172 class narrowmatcher(match):
151 173 """Adapt a matcher to work on a subdirectory only.
152 174
153 175 The paths are remapped to remove/insert the path as needed:
154 176
155 177 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
156 178 >>> m2 = narrowmatcher('sub', m1)
157 179 >>> bool(m2('a.txt'))
158 180 False
159 181 >>> bool(m2('b.txt'))
160 182 True
161 183 >>> bool(m2.matchfn('a.txt'))
162 184 False
163 185 >>> bool(m2.matchfn('b.txt'))
164 186 True
165 187 >>> m2.files()
166 188 ['b.txt']
167 189 >>> m2.exact('b.txt')
168 190 True
169 191 >>> m2.rel('b.txt')
170 192 'b.txt'
171 193 >>> def bad(f, msg):
172 194 ... print "%s: %s" % (f, msg)
173 195 >>> m1.bad = bad
174 196 >>> m2.bad('x.txt', 'No such file')
175 197 sub/x.txt: No such file
176 198 """
177 199
178 200 def __init__(self, path, matcher):
179 201 self._root = matcher._root
180 202 self._cwd = matcher._cwd
181 203 self._path = path
182 204 self._matcher = matcher
183 205 self._always = matcher._always
184 206
185 207 self._files = [f[len(path) + 1:] for f in matcher._files
186 208 if f.startswith(path + "/")]
187 209 self._anypats = matcher._anypats
188 210 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
189 211 self._fmap = set(self._files)
190 212
191 213 def bad(self, f, msg):
192 214 self._matcher.bad(self._path + "/" + f, msg)
193 215
194 def patkind(pat):
195 return _patsplit(pat, None)[0]
216 def patkind(pattern, default=None):
217 '''If pattern is 'kind:pat' with a known kind, return kind.'''
218 return _patsplit(pattern, default)[0]
196 219
197 def _patsplit(pat, default):
198 """Split a string into an optional pattern kind prefix and the
199 actual pattern."""
200 if ':' in pat:
201 kind, val = pat.split(':', 1)
220 def _patsplit(pattern, default):
221 """Split a string into the optional pattern kind prefix and the actual
222 pattern."""
223 if ':' in pattern:
224 kind, pat = pattern.split(':', 1)
202 225 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
203 226 'listfile', 'listfile0', 'set'):
204 return kind, val
205 return default, pat
227 return kind, pat
228 return default, pattern
206 229
207 230 def _globre(pat):
208 "convert a glob pattern into a regexp"
231 '''Convert an extended glob string to a regexp string.'''
209 232 i, n = 0, len(pat)
210 233 res = ''
211 234 group = 0
212 235 escape = re.escape
213 236 def peek():
214 237 return i < n and pat[i]
215 238 while i < n:
216 239 c = pat[i]
217 240 i += 1
218 241 if c not in '*?[{},\\':
219 242 res += escape(c)
220 243 elif c == '*':
221 244 if peek() == '*':
222 245 i += 1
223 246 res += '.*'
224 247 else:
225 248 res += '[^/]*'
226 249 elif c == '?':
227 250 res += '.'
228 251 elif c == '[':
229 252 j = i
230 253 if j < n and pat[j] in '!]':
231 254 j += 1
232 255 while j < n and pat[j] != ']':
233 256 j += 1
234 257 if j >= n:
235 258 res += '\\['
236 259 else:
237 260 stuff = pat[i:j].replace('\\','\\\\')
238 261 i = j + 1
239 262 if stuff[0] == '!':
240 263 stuff = '^' + stuff[1:]
241 264 elif stuff[0] == '^':
242 265 stuff = '\\' + stuff
243 266 res = '%s[%s]' % (res, stuff)
244 267 elif c == '{':
245 268 group += 1
246 269 res += '(?:'
247 270 elif c == '}' and group:
248 271 res += ')'
249 272 group -= 1
250 273 elif c == ',' and group:
251 274 res += '|'
252 275 elif c == '\\':
253 276 p = peek()
254 277 if p:
255 278 i += 1
256 279 res += escape(p)
257 280 else:
258 281 res += escape(c)
259 282 else:
260 283 res += escape(c)
261 284 return res
262 285
263 def _regex(kind, name, tail):
264 '''convert a pattern into a regular expression'''
265 if not name:
286 def _regex(kind, pat, globsuffix):
287 '''Convert a (normalized) pattern of any kind into a regular expression.
288 globsuffix is appended to the regexp of globs.'''
289 if not pat:
266 290 return ''
267 291 if kind == 're':
268 return name
269 elif kind == 'path':
270 return '^' + re.escape(name) + '(?:/|$)'
271 elif kind == 'relglob':
272 return '(?:|.*/)' + _globre(name) + tail
273 elif kind == 'relpath':
274 return re.escape(name) + '(?:/|$)'
275 elif kind == 'relre':
276 if name.startswith('^'):
277 return name
278 return '.*' + name
279 return _globre(name) + tail
292 return pat
293 if kind == 'path':
294 return '^' + re.escape(pat) + '(?:/|$)'
295 if kind == 'relglob':
296 return '(?:|.*/)' + _globre(pat) + globsuffix
297 if kind == 'relpath':
298 return re.escape(pat) + '(?:/|$)'
299 if kind == 'relre':
300 if pat.startswith('^'):
301 return pat
302 return '.*' + pat
303 return _globre(pat) + globsuffix
280 304
281 def _buildmatch(ctx, pats, tail):
282 fset, pats = _expandsets(pats, ctx)
283 if not pats:
305 def _buildmatch(ctx, kindpats, globsuffix):
306 '''Return regexp string and a matcher function for kindpats.
307 globsuffix is appended to the regexp of globs.'''
308 fset, kindpats = _expandsets(kindpats, ctx)
309 if not kindpats:
284 310 return "", fset.__contains__
285 311
286 pat, mf = _buildregexmatch(pats, tail)
312 regex, mf = _buildregexmatch(kindpats, globsuffix)
287 313 if fset:
288 return pat, lambda f: f in fset or mf(f)
289 return pat, mf
314 return regex, lambda f: f in fset or mf(f)
315 return regex, mf
290 316
291 def _buildregexmatch(pats, tail):
292 """build a matching function from a set of patterns"""
317 def _buildregexmatch(kindpats, globsuffix):
318 """Build a match function from a list of kinds and kindpats,
319 return regexp string and a matcher function."""
293 320 try:
294 pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
295 if len(pat) > 20000:
321 regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix)
322 for (k, p) in kindpats])
323 if len(regex) > 20000:
296 324 raise OverflowError
297 return pat, _rematcher(pat)
325 return regex, _rematcher(regex)
298 326 except OverflowError:
299 327 # We're using a Python with a tiny regex engine and we
300 328 # made it explode, so we'll divide the pattern list in two
301 329 # until it works
302 l = len(pats)
330 l = len(kindpats)
303 331 if l < 2:
304 332 raise
305 pata, a = _buildregexmatch(pats[:l//2], tail)
306 patb, b = _buildregexmatch(pats[l//2:], tail)
333 regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix)
334 regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix)
307 335 return regex, lambda s: a(s) or b(s)
308 336 except re.error:
309 for k, p in pats:
337 for k, p in kindpats:
310 338 try:
311 _rematcher('(?:%s)' % _regex(k, p, tail))
339 _rematcher('(?:%s)' % _regex(k, p, globsuffix))
312 340 except re.error:
313 341 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
314 342 raise util.Abort(_("invalid pattern"))
315 343
316 def _normalize(names, default, root, cwd, auditor):
317 pats = []
318 for kind, name in [_patsplit(p, default) for p in names]:
344 def _normalize(patterns, default, root, cwd, auditor):
345 '''Convert 'kind:pat' from the patterns list to tuples with kind and
346 normalized and rooted patterns and with listfiles expanded.'''
347 kindpats = []
348 for kind, pat in [_patsplit(p, default) for p in patterns]:
319 349 if kind in ('glob', 'relpath'):
320 name = pathutil.canonpath(root, cwd, name, auditor)
350 pat = pathutil.canonpath(root, cwd, pat, auditor)
321 351 elif kind in ('relglob', 'path'):
322 name = util.normpath(name)
352 pat = util.normpath(pat)
323 353 elif kind in ('listfile', 'listfile0'):
324 354 try:
325 files = util.readfile(name)
355 files = util.readfile(pat)
326 356 if kind == 'listfile0':
327 357 files = files.split('\0')
328 358 else:
329 359 files = files.splitlines()
330 360 files = [f for f in files if f]
331 361 except EnvironmentError:
332 raise util.Abort(_("unable to read file list (%s)") % name)
333 pats += _normalize(files, default, root, cwd, auditor)
362 raise util.Abort(_("unable to read file list (%s)") % pat)
363 kindpats += _normalize(files, default, root, cwd, auditor)
334 364 continue
365 # else: re or relre - which cannot be normalized
366 kindpats.append((kind, pat))
367 return kindpats
335 368
336 pats.append((kind, name))
337 return pats
338
339 def _roots(patterns):
369 def _roots(kindpats):
340 370 '''return roots and exact explicitly listed files from patterns
341 371
342 372 >>> _roots([('glob', 'g/*'), ('glob', 'g'), ('glob', 'g*')])
343 373 ['g', 'g', '.']
344 374 >>> _roots([('relpath', 'r'), ('path', 'p/p'), ('path', '')])
345 375 ['r', 'p/p', '.']
346 376 >>> _roots([('relglob', 'rg*'), ('re', 're/'), ('relre', 'rr')])
347 377 ['.', '.', '.']
348 378 '''
349 379 r = []
350 for kind, name in patterns:
380 for kind, pat in kindpats:
351 381 if kind == 'glob': # find the non-glob prefix
352 382 root = []
353 for p in name.split('/'):
383 for p in pat.split('/'):
354 384 if '[' in p or '{' in p or '*' in p or '?' in p:
355 385 break
356 386 root.append(p)
357 387 r.append('/'.join(root) or '.')
358 388 elif kind in ('relpath', 'path'):
359 r.append(name or '.')
389 r.append(pat or '.')
360 390 else: # relglob, re, relre
361 391 r.append('.')
362 392 return r
363 393
364 def _anypats(patterns):
365 for kind, name in patterns:
394 def _anypats(kindpats):
395 for kind, pat in kindpats:
366 396 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
367 397 return True
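
For orientation, here is a minimal usage sketch of the matcher API whose docstrings are added in the hunk above. It is not part of the commit: the repository root and file paths are hypothetical, only pattern kinds that need no filesystem access ('relglob:' and 'path:') are used, and it assumes the Python 2 era Mercurial tree targeted by this commit is importable as mercurial.

# Usage sketch (hypothetical paths, not part of the commit) for the matcher
# API documented above.
from mercurial import match as matchmod

# Match C files anywhere plus everything under doc/, but exclude doc/build/.
m = matchmod.match('/repo', '', ['relglob:*.c', 'path:doc'],
                   exclude=['path:doc/build'])

print m('src/main.c')       # True  - any *.c file matches the relglob
print m('doc/index.txt')    # True  - inside the included 'path:doc' pattern
print m('doc/build/x.txt')  # False - excluded even though under doc/
print m.files()             # ['.', 'doc'] - roots of the patterns
print m.exact('doc')        # True  - 'doc' is explicitly listed in .files()
print m.anypats()           # True  - globs and excludes are in play
print m.always()            # False - this matcher does not match everything
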
diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
@@ -1,954 +1,961 @@
1 1 # scmutil.py - Mercurial core utility functions
2 2 #
3 3 # Copyright Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from i18n import _
9 9 from mercurial.node import nullrev
10 10 import util, error, osutil, revset, similar, encoding, phases, parsers
11 11 import pathutil
12 12 import match as matchmod
13 13 import os, errno, re, glob, tempfile
14 14
15 15 if os.name == 'nt':
16 16 import scmwindows as scmplatform
17 17 else:
18 18 import scmposix as scmplatform
19 19
20 20 systemrcpath = scmplatform.systemrcpath
21 21 userrcpath = scmplatform.userrcpath
22 22
23 23 def itersubrepos(ctx1, ctx2):
24 24 """find subrepos in ctx1 or ctx2"""
25 25 # Create a (subpath, ctx) mapping where we prefer subpaths from
26 26 # ctx1. The subpaths from ctx2 are important when the .hgsub file
27 27 # has been modified (in ctx2) but not yet committed (in ctx1).
28 28 subpaths = dict.fromkeys(ctx2.substate, ctx2)
29 29 subpaths.update(dict.fromkeys(ctx1.substate, ctx1))
30 30 for subpath, ctx in sorted(subpaths.iteritems()):
31 31 yield subpath, ctx.sub(subpath)
32 32
33 33 def nochangesfound(ui, repo, excluded=None):
34 34 '''Report no changes for push/pull, excluded is None or a list of
35 35 nodes excluded from the push/pull.
36 36 '''
37 37 secretlist = []
38 38 if excluded:
39 39 for n in excluded:
40 40 if n not in repo:
41 41 # discovery should not have included the filtered revision,
42 42 # we have to explicitly exclude it until discovery is cleanup.
43 43 continue
44 44 ctx = repo[n]
45 45 if ctx.phase() >= phases.secret and not ctx.extinct():
46 46 secretlist.append(n)
47 47
48 48 if secretlist:
49 49 ui.status(_("no changes found (ignored %d secret changesets)\n")
50 50 % len(secretlist))
51 51 else:
52 52 ui.status(_("no changes found\n"))
53 53
54 54 def checknewlabel(repo, lbl, kind):
55 55 # Do not use the "kind" parameter in ui output.
56 56 # It makes strings difficult to translate.
57 57 if lbl in ['tip', '.', 'null']:
58 58 raise util.Abort(_("the name '%s' is reserved") % lbl)
59 59 for c in (':', '\0', '\n', '\r'):
60 60 if c in lbl:
61 61 raise util.Abort(_("%r cannot be used in a name") % c)
62 62 try:
63 63 int(lbl)
64 64 raise util.Abort(_("cannot use an integer as a name"))
65 65 except ValueError:
66 66 pass
67 67
68 68 def checkfilename(f):
69 69 '''Check that the filename f is an acceptable filename for a tracked file'''
70 70 if '\r' in f or '\n' in f:
71 71 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
72 72
73 73 def checkportable(ui, f):
74 74 '''Check if filename f is portable and warn or abort depending on config'''
75 75 checkfilename(f)
76 76 abort, warn = checkportabilityalert(ui)
77 77 if abort or warn:
78 78 msg = util.checkwinfilename(f)
79 79 if msg:
80 80 msg = "%s: %r" % (msg, f)
81 81 if abort:
82 82 raise util.Abort(msg)
83 83 ui.warn(_("warning: %s\n") % msg)
84 84
85 85 def checkportabilityalert(ui):
86 86 '''check if the user's config requests nothing, a warning, or abort for
87 87 non-portable filenames'''
88 88 val = ui.config('ui', 'portablefilenames', 'warn')
89 89 lval = val.lower()
90 90 bval = util.parsebool(val)
91 91 abort = os.name == 'nt' or lval == 'abort'
92 92 warn = bval or lval == 'warn'
93 93 if bval is None and not (warn or abort or lval == 'ignore'):
94 94 raise error.ConfigError(
95 95 _("ui.portablefilenames value is invalid ('%s')") % val)
96 96 return abort, warn
97 97
98 98 class casecollisionauditor(object):
99 99 def __init__(self, ui, abort, dirstate):
100 100 self._ui = ui
101 101 self._abort = abort
102 102 allfiles = '\0'.join(dirstate._map)
103 103 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
104 104 self._dirstate = dirstate
105 105 # The purpose of _newfiles is so that we don't complain about
106 106 # case collisions if someone were to call this object with the
107 107 # same filename twice.
108 108 self._newfiles = set()
109 109
110 110 def __call__(self, f):
111 111 if f in self._newfiles:
112 112 return
113 113 fl = encoding.lower(f)
114 114 if fl in self._loweredfiles and f not in self._dirstate:
115 115 msg = _('possible case-folding collision for %s') % f
116 116 if self._abort:
117 117 raise util.Abort(msg)
118 118 self._ui.warn(_("warning: %s\n") % msg)
119 119 self._loweredfiles.add(fl)
120 120 self._newfiles.add(f)
121 121
122 122 class abstractvfs(object):
123 123 """Abstract base class; cannot be instantiated"""
124 124
125 125 def __init__(self, *args, **kwargs):
126 126 '''Prevent instantiation; don't call this from subclasses.'''
127 127 raise NotImplementedError('attempted instantiating ' + str(type(self)))
128 128
129 129 def tryread(self, path):
130 130 '''gracefully return an empty string for missing files'''
131 131 try:
132 132 return self.read(path)
133 133 except IOError, inst:
134 134 if inst.errno != errno.ENOENT:
135 135 raise
136 136 return ""
137 137
138 138 def open(self, path, mode="r", text=False, atomictemp=False):
139 139 self.open = self.__call__
140 140 return self.__call__(path, mode, text, atomictemp)
141 141
142 142 def read(self, path):
143 143 fp = self(path, 'rb')
144 144 try:
145 145 return fp.read()
146 146 finally:
147 147 fp.close()
148 148
149 149 def write(self, path, data):
150 150 fp = self(path, 'wb')
151 151 try:
152 152 return fp.write(data)
153 153 finally:
154 154 fp.close()
155 155
156 156 def append(self, path, data):
157 157 fp = self(path, 'ab')
158 158 try:
159 159 return fp.write(data)
160 160 finally:
161 161 fp.close()
162 162
163 163 def chmod(self, path, mode):
164 164 return os.chmod(self.join(path), mode)
165 165
166 166 def exists(self, path=None):
167 167 return os.path.exists(self.join(path))
168 168
169 169 def fstat(self, fp):
170 170 return util.fstat(fp)
171 171
172 172 def isdir(self, path=None):
173 173 return os.path.isdir(self.join(path))
174 174
175 175 def isfile(self, path=None):
176 176 return os.path.isfile(self.join(path))
177 177
178 178 def islink(self, path=None):
179 179 return os.path.islink(self.join(path))
180 180
181 181 def lstat(self, path=None):
182 182 return os.lstat(self.join(path))
183 183
184 184 def makedir(self, path=None, notindexed=True):
185 185 return util.makedir(self.join(path), notindexed)
186 186
187 187 def makedirs(self, path=None, mode=None):
188 188 return util.makedirs(self.join(path), mode)
189 189
190 190 def makelock(self, info, path):
191 191 return util.makelock(info, self.join(path))
192 192
193 193 def mkdir(self, path=None):
194 194 return os.mkdir(self.join(path))
195 195
196 196 def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False):
197 197 fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix,
198 198 dir=self.join(dir), text=text)
199 199 dname, fname = util.split(name)
200 200 if dir:
201 201 return fd, os.path.join(dir, fname)
202 202 else:
203 203 return fd, fname
204 204
205 205 def readdir(self, path=None, stat=None, skip=None):
206 206 return osutil.listdir(self.join(path), stat, skip)
207 207
208 208 def readlock(self, path):
209 209 return util.readlock(self.join(path))
210 210
211 211 def rename(self, src, dst):
212 212 return util.rename(self.join(src), self.join(dst))
213 213
214 214 def readlink(self, path):
215 215 return os.readlink(self.join(path))
216 216
217 217 def setflags(self, path, l, x):
218 218 return util.setflags(self.join(path), l, x)
219 219
220 220 def stat(self, path=None):
221 221 return os.stat(self.join(path))
222 222
223 223 def unlink(self, path=None):
224 224 return util.unlink(self.join(path))
225 225
226 226 def utime(self, path=None, t=None):
227 227 return os.utime(self.join(path), t)
228 228
229 229 class vfs(abstractvfs):
230 230 '''Operate files relative to a base directory
231 231
232 232 This class is used to hide the details of COW semantics and
233 233 remote file access from higher level code.
234 234 '''
235 235 def __init__(self, base, audit=True, expandpath=False, realpath=False):
236 236 if expandpath:
237 237 base = util.expandpath(base)
238 238 if realpath:
239 239 base = os.path.realpath(base)
240 240 self.base = base
241 241 self._setmustaudit(audit)
242 242 self.createmode = None
243 243 self._trustnlink = None
244 244
245 245 def _getmustaudit(self):
246 246 return self._audit
247 247
248 248 def _setmustaudit(self, onoff):
249 249 self._audit = onoff
250 250 if onoff:
251 251 self.audit = pathutil.pathauditor(self.base)
252 252 else:
253 253 self.audit = util.always
254 254
255 255 mustaudit = property(_getmustaudit, _setmustaudit)
256 256
257 257 @util.propertycache
258 258 def _cansymlink(self):
259 259 return util.checklink(self.base)
260 260
261 261 @util.propertycache
262 262 def _chmod(self):
263 263 return util.checkexec(self.base)
264 264
265 265 def _fixfilemode(self, name):
266 266 if self.createmode is None or not self._chmod:
267 267 return
268 268 os.chmod(name, self.createmode & 0666)
269 269
270 270 def __call__(self, path, mode="r", text=False, atomictemp=False):
271 271 if self._audit:
272 272 r = util.checkosfilename(path)
273 273 if r:
274 274 raise util.Abort("%s: %r" % (r, path))
275 275 self.audit(path)
276 276 f = self.join(path)
277 277
278 278 if not text and "b" not in mode:
279 279 mode += "b" # for that other OS
280 280
281 281 nlink = -1
282 282 if mode not in ('r', 'rb'):
283 283 dirname, basename = util.split(f)
284 284 # If basename is empty, then the path is malformed because it points
285 285 # to a directory. Let the posixfile() call below raise IOError.
286 286 if basename:
287 287 if atomictemp:
288 288 util.ensuredirs(dirname, self.createmode)
289 289 return util.atomictempfile(f, mode, self.createmode)
290 290 try:
291 291 if 'w' in mode:
292 292 util.unlink(f)
293 293 nlink = 0
294 294 else:
295 295 # nlinks() may behave differently for files on Windows
296 296 # shares if the file is open.
297 297 fd = util.posixfile(f)
298 298 nlink = util.nlinks(f)
299 299 if nlink < 1:
300 300 nlink = 2 # force mktempcopy (issue1922)
301 301 fd.close()
302 302 except (OSError, IOError), e:
303 303 if e.errno != errno.ENOENT:
304 304 raise
305 305 nlink = 0
306 306 util.ensuredirs(dirname, self.createmode)
307 307 if nlink > 0:
308 308 if self._trustnlink is None:
309 309 self._trustnlink = nlink > 1 or util.checknlink(f)
310 310 if nlink > 1 or not self._trustnlink:
311 311 util.rename(util.mktempcopy(f), f)
312 312 fp = util.posixfile(f, mode)
313 313 if nlink == 0:
314 314 self._fixfilemode(f)
315 315 return fp
316 316
317 317 def symlink(self, src, dst):
318 318 self.audit(dst)
319 319 linkname = self.join(dst)
320 320 try:
321 321 os.unlink(linkname)
322 322 except OSError:
323 323 pass
324 324
325 325 util.ensuredirs(os.path.dirname(linkname), self.createmode)
326 326
327 327 if self._cansymlink:
328 328 try:
329 329 os.symlink(src, linkname)
330 330 except OSError, err:
331 331 raise OSError(err.errno, _('could not symlink to %r: %s') %
332 332 (src, err.strerror), linkname)
333 333 else:
334 334 self.write(dst, src)
335 335
336 336 def join(self, path):
337 337 if path:
338 338 return os.path.join(self.base, path)
339 339 else:
340 340 return self.base
341 341
342 342 opener = vfs
343 343
344 344 class auditvfs(object):
345 345 def __init__(self, vfs):
346 346 self.vfs = vfs
347 347
348 348 def _getmustaudit(self):
349 349 return self.vfs.mustaudit
350 350
351 351 def _setmustaudit(self, onoff):
352 352 self.vfs.mustaudit = onoff
353 353
354 354 mustaudit = property(_getmustaudit, _setmustaudit)
355 355
356 356 class filtervfs(abstractvfs, auditvfs):
357 357 '''Wrapper vfs for filtering filenames with a function.'''
358 358
359 359 def __init__(self, vfs, filter):
360 360 auditvfs.__init__(self, vfs)
361 361 self._filter = filter
362 362
363 363 def __call__(self, path, *args, **kwargs):
364 364 return self.vfs(self._filter(path), *args, **kwargs)
365 365
366 366 def join(self, path):
367 367 if path:
368 368 return self.vfs.join(self._filter(path))
369 369 else:
370 370 return self.vfs.join(path)
371 371
372 372 filteropener = filtervfs
373 373
374 374 class readonlyvfs(abstractvfs, auditvfs):
375 375 '''Wrapper vfs preventing any writing.'''
376 376
377 377 def __init__(self, vfs):
378 378 auditvfs.__init__(self, vfs)
379 379
380 380 def __call__(self, path, mode='r', *args, **kw):
381 381 if mode not in ('r', 'rb'):
382 382 raise util.Abort('this vfs is read only')
383 383 return self.vfs(path, mode, *args, **kw)
384 384
385 385
386 386 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
387 387 '''yield every hg repository under path, always recursively.
388 388 The recurse flag will only control recursion into repo working dirs'''
389 389 def errhandler(err):
390 390 if err.filename == path:
391 391 raise err
392 392 samestat = getattr(os.path, 'samestat', None)
393 393 if followsym and samestat is not None:
394 394 def adddir(dirlst, dirname):
395 395 match = False
396 396 dirstat = os.stat(dirname)
397 397 for lstdirstat in dirlst:
398 398 if samestat(dirstat, lstdirstat):
399 399 match = True
400 400 break
401 401 if not match:
402 402 dirlst.append(dirstat)
403 403 return not match
404 404 else:
405 405 followsym = False
406 406
407 407 if (seen_dirs is None) and followsym:
408 408 seen_dirs = []
409 409 adddir(seen_dirs, path)
410 410 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
411 411 dirs.sort()
412 412 if '.hg' in dirs:
413 413 yield root # found a repository
414 414 qroot = os.path.join(root, '.hg', 'patches')
415 415 if os.path.isdir(os.path.join(qroot, '.hg')):
416 416 yield qroot # we have a patch queue repo here
417 417 if recurse:
418 418 # avoid recursing inside the .hg directory
419 419 dirs.remove('.hg')
420 420 else:
421 421 dirs[:] = [] # don't descend further
422 422 elif followsym:
423 423 newdirs = []
424 424 for d in dirs:
425 425 fname = os.path.join(root, d)
426 426 if adddir(seen_dirs, fname):
427 427 if os.path.islink(fname):
428 428 for hgname in walkrepos(fname, True, seen_dirs):
429 429 yield hgname
430 430 else:
431 431 newdirs.append(d)
432 432 dirs[:] = newdirs
433 433
434 434 def osrcpath():
435 435 '''return default os-specific hgrc search path'''
436 436 path = systemrcpath()
437 437 path.extend(userrcpath())
438 438 path = [os.path.normpath(f) for f in path]
439 439 return path
440 440
441 441 _rcpath = None
442 442
443 443 def rcpath():
444 444 '''return hgrc search path. if env var HGRCPATH is set, use it.
445 445 for each item in path, if directory, use files ending in .rc,
446 446 else use item.
447 447 make HGRCPATH empty to only look in .hg/hgrc of current repo.
448 448 if no HGRCPATH, use default os-specific path.'''
449 449 global _rcpath
450 450 if _rcpath is None:
451 451 if 'HGRCPATH' in os.environ:
452 452 _rcpath = []
453 453 for p in os.environ['HGRCPATH'].split(os.pathsep):
454 454 if not p:
455 455 continue
456 456 p = util.expandpath(p)
457 457 if os.path.isdir(p):
458 458 for f, kind in osutil.listdir(p):
459 459 if f.endswith('.rc'):
460 460 _rcpath.append(os.path.join(p, f))
461 461 else:
462 462 _rcpath.append(p)
463 463 else:
464 464 _rcpath = osrcpath()
465 465 return _rcpath
466 466
467 467 def revsingle(repo, revspec, default='.'):
468 468 if not revspec and revspec != 0:
469 469 return repo[default]
470 470
471 471 l = revrange(repo, [revspec])
472 472 if len(l) < 1:
473 473 raise util.Abort(_('empty revision set'))
474 474 return repo[l[-1]]
475 475
476 476 def revpair(repo, revs):
477 477 if not revs:
478 478 return repo.dirstate.p1(), None
479 479
480 480 l = revrange(repo, revs)
481 481
482 482 if not l:
483 483 first = second = None
484 484 elif l.isascending():
485 485 first = l.min()
486 486 second = l.max()
487 487 elif l.isdescending():
488 488 first = l.max()
489 489 second = l.min()
490 490 else:
491 491 l = list(l)
492 492 first = l[0]
493 493 second = l[-1]
494 494
495 495 if first is None:
496 496 raise util.Abort(_('empty revision range'))
497 497
498 498 if first == second and len(revs) == 1 and _revrangesep not in revs[0]:
499 499 return repo.lookup(first), None
500 500
501 501 return repo.lookup(first), repo.lookup(second)
502 502
503 503 _revrangesep = ':'
504 504
505 505 def revrange(repo, revs):
506 506 """Yield revision as strings from a list of revision specifications."""
507 507
508 508 def revfix(repo, val, defval):
509 509 if not val and val != 0 and defval is not None:
510 510 return defval
511 511 return repo[val].rev()
512 512
513 513 seen, l = set(), revset.baseset([])
514 514 for spec in revs:
515 515 if l and not seen:
516 516 seen = set(l)
517 517 # attempt to parse old-style ranges first to deal with
518 518 # things like old-tag which contain query metacharacters
519 519 try:
520 520 if isinstance(spec, int):
521 521 seen.add(spec)
522 522 l = l + revset.baseset([spec])
523 523 continue
524 524
525 525 if _revrangesep in spec:
526 526 start, end = spec.split(_revrangesep, 1)
527 527 start = revfix(repo, start, 0)
528 528 end = revfix(repo, end, len(repo) - 1)
529 529 if end == nullrev and start < 0:
530 530 start = nullrev
531 531 rangeiter = repo.changelog.revs(start, end)
532 532 if not seen and not l:
533 533 # by far the most common case: revs = ["-1:0"]
534 534 l = revset.baseset(rangeiter)
535 535 # defer syncing seen until next iteration
536 536 continue
537 537 newrevs = set(rangeiter)
538 538 if seen:
539 539 newrevs.difference_update(seen)
540 540 seen.update(newrevs)
541 541 else:
542 542 seen = newrevs
543 543 l = l + revset.baseset(sorted(newrevs, reverse=start > end))
544 544 continue
545 545 elif spec and spec in repo: # single unquoted rev
546 546 rev = revfix(repo, spec, None)
547 547 if rev in seen:
548 548 continue
549 549 seen.add(rev)
550 550 l = l + revset.baseset([rev])
551 551 continue
552 552 except error.RepoLookupError:
553 553 pass
554 554
555 555 # fall through to new-style queries if old-style fails
556 556 m = revset.match(repo.ui, spec, repo)
557 557 if seen or l:
558 558 dl = [r for r in m(repo, revset.spanset(repo)) if r not in seen]
559 559 l = l + revset.baseset(dl)
560 560 seen.update(dl)
561 561 else:
562 562 l = m(repo, revset.spanset(repo))
563 563
564 564 return l
565 565
566 566 def expandpats(pats):
567 '''Expand bare globs when running on windows.
568 On posix we assume it has already been done by sh.'''
567 569 if not util.expandglobs:
568 570 return list(pats)
569 571 ret = []
570 for p in pats:
571 kind, name = matchmod._patsplit(p, None)
572 for kindpat in pats:
573 kind, pat = matchmod._patsplit(kindpat, None)
572 574 if kind is None:
573 575 try:
574 globbed = glob.glob(name)
576 globbed = glob.glob(pat)
575 577 except re.error:
576 globbed = [name]
578 globbed = [pat]
577 579 if globbed:
578 580 ret.extend(globbed)
579 581 continue
580 ret.append(p)
582 ret.append(kindpat)
581 583 return ret
582 584
583 585 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
586 '''Return a matcher and the patterns that were used.
587 The matcher will warn about bad matches.'''
584 588 if pats == ("",):
585 589 pats = []
586 590 if not globbed and default == 'relpath':
587 591 pats = expandpats(pats or [])
588 592
589 593 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
590 594 default)
591 595 def badfn(f, msg):
592 596 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
593 597 m.bad = badfn
594 598 return m, pats
595 599
596 600 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
601 '''Return a matcher that will warn about bad matches.'''
597 602 return matchandpats(ctx, pats, opts, globbed, default)[0]
598 603
599 604 def matchall(repo):
605 '''Return a matcher that will efficiently match everything.'''
600 606 return matchmod.always(repo.root, repo.getcwd())
601 607
602 608 def matchfiles(repo, files):
609 '''Return a matcher that will efficiently match exactly these files.'''
603 610 return matchmod.exact(repo.root, repo.getcwd(), files)
604 611
605 612 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
606 613 if dry_run is None:
607 614 dry_run = opts.get('dry_run')
608 615 if similarity is None:
609 616 similarity = float(opts.get('similarity') or 0)
610 617 # we'd use status here, except handling of symlinks and ignore is tricky
611 618 m = match(repo[None], pats, opts)
612 619 rejected = []
613 620 m.bad = lambda x, y: rejected.append(x)
614 621
615 622 added, unknown, deleted, removed = _interestingfiles(repo, m)
616 623
617 624 unknownset = set(unknown)
618 625 toprint = unknownset.copy()
619 626 toprint.update(deleted)
620 627 for abs in sorted(toprint):
621 628 if repo.ui.verbose or not m.exact(abs):
622 629 rel = m.rel(abs)
623 630 if abs in unknownset:
624 631 status = _('adding %s\n') % ((pats and rel) or abs)
625 632 else:
626 633 status = _('removing %s\n') % ((pats and rel) or abs)
627 634 repo.ui.status(status)
628 635
629 636 renames = _findrenames(repo, m, added + unknown, removed + deleted,
630 637 similarity)
631 638
632 639 if not dry_run:
633 640 _markchanges(repo, unknown, deleted, renames)
634 641
635 642 for f in rejected:
636 643 if f in m.files():
637 644 return 1
638 645 return 0
639 646
640 647 def marktouched(repo, files, similarity=0.0):
641 648 '''Assert that files have somehow been operated upon. files are relative to
642 649 the repo root.'''
643 650 m = matchfiles(repo, files)
644 651 rejected = []
645 652 m.bad = lambda x, y: rejected.append(x)
646 653
647 654 added, unknown, deleted, removed = _interestingfiles(repo, m)
648 655
649 656 if repo.ui.verbose:
650 657 unknownset = set(unknown)
651 658 toprint = unknownset.copy()
652 659 toprint.update(deleted)
653 660 for abs in sorted(toprint):
654 661 if abs in unknownset:
655 662 status = _('adding %s\n') % abs
656 663 else:
657 664 status = _('removing %s\n') % abs
658 665 repo.ui.status(status)
659 666
660 667 renames = _findrenames(repo, m, added + unknown, removed + deleted,
661 668 similarity)
662 669
663 670 _markchanges(repo, unknown, deleted, renames)
664 671
665 672 for f in rejected:
666 673 if f in m.files():
667 674 return 1
668 675 return 0
669 676
670 677 def _interestingfiles(repo, matcher):
671 678 '''Walk dirstate with matcher, looking for files that addremove would care
672 679 about.
673 680
674 681 This is different from dirstate.status because it doesn't care about
675 682 whether files are modified or clean.'''
676 683 added, unknown, deleted, removed = [], [], [], []
677 684 audit_path = pathutil.pathauditor(repo.root)
678 685
679 686 ctx = repo[None]
680 687 dirstate = repo.dirstate
681 688 walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False,
682 689 full=False)
683 690 for abs, st in walkresults.iteritems():
684 691 dstate = dirstate[abs]
685 692 if dstate == '?' and audit_path.check(abs):
686 693 unknown.append(abs)
687 694 elif dstate != 'r' and not st:
688 695 deleted.append(abs)
689 696 # for finding renames
690 697 elif dstate == 'r':
691 698 removed.append(abs)
692 699 elif dstate == 'a':
693 700 added.append(abs)
694 701
695 702 return added, unknown, deleted, removed
696 703
697 704 def _findrenames(repo, matcher, added, removed, similarity):
698 705 '''Find renames from removed files to added ones.'''
699 706 renames = {}
700 707 if similarity > 0:
701 708 for old, new, score in similar.findrenames(repo, added, removed,
702 709 similarity):
703 710 if (repo.ui.verbose or not matcher.exact(old)
704 711 or not matcher.exact(new)):
705 712 repo.ui.status(_('recording removal of %s as rename to %s '
706 713 '(%d%% similar)\n') %
707 714 (matcher.rel(old), matcher.rel(new),
708 715 score * 100))
709 716 renames[new] = old
710 717 return renames
711 718
712 719 def _markchanges(repo, unknown, deleted, renames):
713 720 '''Marks the files in unknown as added, the files in deleted as removed,
714 721 and the files in renames as copied.'''
715 722 wctx = repo[None]
716 723 wlock = repo.wlock()
717 724 try:
718 725 wctx.forget(deleted)
719 726 wctx.add(unknown)
720 727 for new, old in renames.iteritems():
721 728 wctx.copy(old, new)
722 729 finally:
723 730 wlock.release()
724 731
725 732 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
726 733 """Update the dirstate to reflect the intent of copying src to dst. For
727 734 different reasons it might not end with dst being marked as copied from src.
728 735 """
729 736 origsrc = repo.dirstate.copied(src) or src
730 737 if dst == origsrc: # copying back a copy?
731 738 if repo.dirstate[dst] not in 'mn' and not dryrun:
732 739 repo.dirstate.normallookup(dst)
733 740 else:
734 741 if repo.dirstate[origsrc] == 'a' and origsrc == src:
735 742 if not ui.quiet:
736 743 ui.warn(_("%s has not been committed yet, so no copy "
737 744 "data will be stored for %s.\n")
738 745 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
739 746 if repo.dirstate[dst] in '?r' and not dryrun:
740 747 wctx.add([dst])
741 748 elif not dryrun:
742 749 wctx.copy(origsrc, dst)
743 750
744 751 def readrequires(opener, supported):
745 752 '''Reads and parses .hg/requires and checks if all entries found
746 753 are in the list of supported features.'''
747 754 requirements = set(opener.read("requires").splitlines())
748 755 missings = []
749 756 for r in requirements:
750 757 if r not in supported:
751 758 if not r or not r[0].isalnum():
752 759 raise error.RequirementError(_(".hg/requires file is corrupt"))
753 760 missings.append(r)
754 761 missings.sort()
755 762 if missings:
756 763 raise error.RequirementError(
757 764 _("repository requires features unknown to this Mercurial: %s")
758 765 % " ".join(missings),
759 766 hint=_("see http://mercurial.selenic.com/wiki/MissingRequirement"
760 767 " for more information"))
761 768 return requirements
762 769
763 770 class filecachesubentry(object):
764 771 def __init__(self, path, stat):
765 772 self.path = path
766 773 self.cachestat = None
767 774 self._cacheable = None
768 775
769 776 if stat:
770 777 self.cachestat = filecachesubentry.stat(self.path)
771 778
772 779 if self.cachestat:
773 780 self._cacheable = self.cachestat.cacheable()
774 781 else:
775 782 # None means we don't know yet
776 783 self._cacheable = None
777 784
778 785 def refresh(self):
779 786 if self.cacheable():
780 787 self.cachestat = filecachesubentry.stat(self.path)
781 788
782 789 def cacheable(self):
783 790 if self._cacheable is not None:
784 791 return self._cacheable
785 792
786 793 # we don't know yet, assume it is for now
787 794 return True
788 795
789 796 def changed(self):
790 797 # no point in going further if we can't cache it
791 798 if not self.cacheable():
792 799 return True
793 800
794 801 newstat = filecachesubentry.stat(self.path)
795 802
796 803 # we may not know if it's cacheable yet, check again now
797 804 if newstat and self._cacheable is None:
798 805 self._cacheable = newstat.cacheable()
799 806
800 807 # check again
801 808 if not self._cacheable:
802 809 return True
803 810
804 811 if self.cachestat != newstat:
805 812 self.cachestat = newstat
806 813 return True
807 814 else:
808 815 return False
809 816
810 817 @staticmethod
811 818 def stat(path):
812 819 try:
813 820 return util.cachestat(path)
814 821 except OSError, e:
815 822 if e.errno != errno.ENOENT:
816 823 raise
817 824
818 825 class filecacheentry(object):
819 826 def __init__(self, paths, stat=True):
820 827 self._entries = []
821 828 for path in paths:
822 829 self._entries.append(filecachesubentry(path, stat))
823 830
824 831 def changed(self):
825 832 '''true if any entry has changed'''
826 833 for entry in self._entries:
827 834 if entry.changed():
828 835 return True
829 836 return False
830 837
831 838 def refresh(self):
832 839 for entry in self._entries:
833 840 entry.refresh()
834 841
835 842 class filecache(object):
836 843 '''A property like decorator that tracks files under .hg/ for updates.
837 844
838 845 Records stat info when called in _filecache.
839 846
840 847 On subsequent calls, compares old stat info with new info, and recreates the
841 848 object when any of the files changes, updating the new stat info in
842 849 _filecache.
843 850
844 851 Mercurial either atomic renames or appends for files under .hg,
845 852 so to ensure the cache is reliable we need the filesystem to be able
846 853 to tell us if a file has been replaced. If it can't, we fallback to
847 854 recreating the object on every call (essentially the same behaviour as
848 855 propertycache).
849 856
850 857 '''
851 858 def __init__(self, *paths):
852 859 self.paths = paths
853 860
854 861 def join(self, obj, fname):
855 862 """Used to compute the runtime path of a cached file.
856 863
857 864 Users should subclass filecache and provide their own version of this
858 865 function to call the appropriate join function on 'obj' (an instance
859 866 of the class that its member function was decorated).
860 867 """
861 868 return obj.join(fname)
862 869
863 870 def __call__(self, func):
864 871 self.func = func
865 872 self.name = func.__name__
866 873 return self
867 874
868 875 def __get__(self, obj, type=None):
869 876 # do we need to check if the file changed?
870 877 if self.name in obj.__dict__:
871 878 assert self.name in obj._filecache, self.name
872 879 return obj.__dict__[self.name]
873 880
874 881 entry = obj._filecache.get(self.name)
875 882
876 883 if entry:
877 884 if entry.changed():
878 885 entry.obj = self.func(obj)
879 886 else:
880 887 paths = [self.join(obj, path) for path in self.paths]
881 888
882 889 # We stat -before- creating the object so our cache doesn't lie if
883 890 # a writer modified between the time we read and stat
884 891 entry = filecacheentry(paths, True)
885 892 entry.obj = self.func(obj)
886 893
887 894 obj._filecache[self.name] = entry
888 895
889 896 obj.__dict__[self.name] = entry.obj
890 897 return entry.obj
891 898
892 899 def __set__(self, obj, value):
893 900 if self.name not in obj._filecache:
894 901 # we add an entry for the missing value because X in __dict__
895 902 # implies X in _filecache
896 903 paths = [self.join(obj, path) for path in self.paths]
897 904 ce = filecacheentry(paths, False)
898 905 obj._filecache[self.name] = ce
899 906 else:
900 907 ce = obj._filecache[self.name]
901 908
902 909 ce.obj = value # update cached copy
903 910 obj.__dict__[self.name] = value # update copy returned by obj.x
904 911
905 912 def __delete__(self, obj):
906 913 try:
907 914 del obj.__dict__[self.name]
908 915 except KeyError:
909 916 raise AttributeError(self.name)
910 917
911 918 class dirs(object):
912 919 '''a multiset of directory names from a dirstate or manifest'''
913 920
914 921 def __init__(self, map, skip=None):
915 922 self._dirs = {}
916 923 addpath = self.addpath
917 924 if util.safehasattr(map, 'iteritems') and skip is not None:
918 925 for f, s in map.iteritems():
919 926 if s[0] != skip:
920 927 addpath(f)
921 928 else:
922 929 for f in map:
923 930 addpath(f)
924 931
925 932 def addpath(self, path):
926 933 dirs = self._dirs
927 934 for base in finddirs(path):
928 935 if base in dirs:
929 936 dirs[base] += 1
930 937 return
931 938 dirs[base] = 1
932 939
933 940 def delpath(self, path):
934 941 dirs = self._dirs
935 942 for base in finddirs(path):
936 943 if dirs[base] > 1:
937 944 dirs[base] -= 1
938 945 return
939 946 del dirs[base]
940 947
941 948 def __iter__(self):
942 949 return self._dirs.iterkeys()
943 950
944 951 def __contains__(self, d):
945 952 return d in self._dirs
946 953
947 954 if util.safehasattr(parsers, 'dirs'):
948 955 dirs = parsers.dirs
949 956
950 957 def finddirs(path):
951 958 pos = path.rfind('/')
952 959 while pos != -1:
953 960 yield path[:pos]
954 961 pos = path.rfind('/', 0, pos)
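
The scmutil helpers documented above are thin wrappers around the match module. The sketch below relates them; the repository path is hypothetical and the snippet again assumes the Python 2 era Mercurial tree, where ui.ui() and hg.repository() are the usual entry points.

# Sketch (hypothetical repository path, not part of the commit) of the
# scmutil matcher helpers documented above.
from mercurial import ui as uimod, hg, scmutil
from mercurial import match as matchmod

repo = hg.repository(uimod.ui(), '/path/to/repo')  # hypothetical local repo

m_all = scmutil.matchall(repo)                 # wraps matchmod.always
m_exact = scmutil.matchfiles(repo, ['a.txt'])  # wraps matchmod.exact

print m_all.always(), m_all.files()        # True [] - matches everything
print m_exact('a.txt'), m_exact('b.txt')   # True False - only listed files
print m_exact.exact('a.txt')               # True - 'a.txt' is in .files()

# patkind, documented in the match.py hunk, classifies 'kind:pat' strings the
# same way expandpats and _normalize do:
print matchmod.patkind('re:.*\\.py$')      # 're'  - explicit known kind
print matchmod.patkind('glob:*.py')        # 'glob'
print matchmod.patkind('*.py')             # None  - no kind, default applies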