merge with stable
Matt Mackall
r16791:977c8012 (merge changeset, default branch)
match.py
@@ -1,341 +1,344 @@
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import scmutil, util, fileset
9 import scmutil, util, fileset
10 from i18n import _
10 from i18n import _
11
11
12 def _expandsets(pats, ctx):
12 def _expandsets(pats, ctx):
13 '''convert set: patterns into a list of files in the given context'''
13 '''convert set: patterns into a list of files in the given context'''
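(Reviewer's note, not part of the changeset: given e.g. pats = [('set', 'modified()'), ('glob', '*.py')] and a changectx, this returns the set of files selected by the fileset expression together with the remaining non-set patterns; a 'set:' pattern without a context aborts, as the code below shows.)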
14 fset = set()
14 fset = set()
15 other = []
15 other = []
16
16
17 for kind, expr in pats:
17 for kind, expr in pats:
18 if kind == 'set':
18 if kind == 'set':
19 if not ctx:
19 if not ctx:
20 raise util.Abort("fileset expression with no context")
20 raise util.Abort("fileset expression with no context")
21 s = fileset.getfileset(ctx, expr)
21 s = fileset.getfileset(ctx, expr)
22 fset.update(s)
22 fset.update(s)
23 continue
23 continue
24 other.append((kind, expr))
24 other.append((kind, expr))
25 return fset, other
25 return fset, other
26
26
27 class match(object):
27 class match(object):
28 def __init__(self, root, cwd, patterns, include=[], exclude=[],
28 def __init__(self, root, cwd, patterns, include=[], exclude=[],
29 default='glob', exact=False, auditor=None, ctx=None):
29 default='glob', exact=False, auditor=None, ctx=None):
30 """build an object to match a set of file patterns
30 """build an object to match a set of file patterns
31
31
32 arguments:
32 arguments:
33 root - the canonical root of the tree you're matching against
33 root - the canonical root of the tree you're matching against
34 cwd - the current working directory, if relevant
34 cwd - the current working directory, if relevant
35 patterns - patterns to find
35 patterns - patterns to find
36 include - patterns to include
36 include - patterns to include
37 exclude - patterns to exclude
37 exclude - patterns to exclude
38 default - if a pattern in names has no explicit type, assume this one
38 default - if a pattern in names has no explicit type, assume this one
39 exact - patterns are actually literals
39 exact - patterns are actually literals
40
40
41 a pattern is one of:
41 a pattern is one of:
42 'glob:<glob>' - a glob relative to cwd
42 'glob:<glob>' - a glob relative to cwd
43 're:<regexp>' - a regular expression
43 're:<regexp>' - a regular expression
44 'path:<path>' - a path relative to canonroot
44 'path:<path>' - a path relative to canonroot
45 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
45 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
46 'relpath:<path>' - a path relative to cwd
46 'relpath:<path>' - a path relative to cwd
47 'relre:<regexp>' - a regexp that needn't match the start of a name
47 'relre:<regexp>' - a regexp that needn't match the start of a name
48 'set:<fileset>' - a fileset expression
48 'set:<fileset>' - a fileset expression
49 '<something>' - a pattern of the specified default type
49 '<something>' - a pattern of the specified default type
50 """
50 """
51
51
52 self._root = root
52 self._root = root
53 self._cwd = cwd
53 self._cwd = cwd
54 self._files = []
54 self._files = []
55 self._anypats = bool(include or exclude)
55 self._anypats = bool(include or exclude)
56 self._ctx = ctx
56 self._ctx = ctx
57
57
58 if include:
58 if include:
59 pats = _normalize(include, 'glob', root, cwd, auditor)
59 pats = _normalize(include, 'glob', root, cwd, auditor)
60 self.includepat, im = _buildmatch(ctx, pats, '(?:/|$)')
60 self.includepat, im = _buildmatch(ctx, pats, '(?:/|$)')
61 if exclude:
61 if exclude:
62 pats = _normalize(exclude, 'glob', root, cwd, auditor)
62 pats = _normalize(exclude, 'glob', root, cwd, auditor)
63 self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
63 self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
64 if exact:
64 if exact:
65 self._files = patterns
65 if isinstance(patterns, list):
66 self._files = patterns
67 else:
68 self._files = list(patterns)
66 pm = self.exact
69 pm = self.exact
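Reviewer note on new lines 65-68: with exact=True the caller may now pass any iterable of file names, not only a list; the isinstance check keeps self._files a concrete list (the input is used as-is when it already is a list and copied into one otherwise), so callers of files() always get a real list even when, say, a set is passed.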
67 elif patterns:
70 elif patterns:
68 pats = _normalize(patterns, default, root, cwd, auditor)
71 pats = _normalize(patterns, default, root, cwd, auditor)
69 self._files = _roots(pats)
72 self._files = _roots(pats)
70 self._anypats = self._anypats or _anypats(pats)
73 self._anypats = self._anypats or _anypats(pats)
71 self.patternspat, pm = _buildmatch(ctx, pats, '$')
74 self.patternspat, pm = _buildmatch(ctx, pats, '$')
72
75
73 if patterns or exact:
76 if patterns or exact:
74 if include:
77 if include:
75 if exclude:
78 if exclude:
76 m = lambda f: im(f) and not em(f) and pm(f)
79 m = lambda f: im(f) and not em(f) and pm(f)
77 else:
80 else:
78 m = lambda f: im(f) and pm(f)
81 m = lambda f: im(f) and pm(f)
79 else:
82 else:
80 if exclude:
83 if exclude:
81 m = lambda f: not em(f) and pm(f)
84 m = lambda f: not em(f) and pm(f)
82 else:
85 else:
83 m = pm
86 m = pm
84 else:
87 else:
85 if include:
88 if include:
86 if exclude:
89 if exclude:
87 m = lambda f: im(f) and not em(f)
90 m = lambda f: im(f) and not em(f)
88 else:
91 else:
89 m = im
92 m = im
90 else:
93 else:
91 if exclude:
94 if exclude:
92 m = lambda f: not em(f)
95 m = lambda f: not em(f)
93 else:
96 else:
94 m = lambda f: True
97 m = lambda f: True
95
98
96 self.matchfn = m
99 self.matchfn = m
97 self._fmap = set(self._files)
100 self._fmap = set(self._files)
98
101
99 def __call__(self, fn):
102 def __call__(self, fn):
100 return self.matchfn(fn)
103 return self.matchfn(fn)
101 def __iter__(self):
104 def __iter__(self):
102 for f in self._files:
105 for f in self._files:
103 yield f
106 yield f
104 def bad(self, f, msg):
107 def bad(self, f, msg):
105 '''callback for each explicit file that can't be
108 '''callback for each explicit file that can't be
106 found/accessed, with an error message
109 found/accessed, with an error message
107 '''
110 '''
108 pass
111 pass
109 def dir(self, f):
112 def dir(self, f):
110 pass
113 pass
111 def missing(self, f):
114 def missing(self, f):
112 pass
115 pass
113 def exact(self, f):
116 def exact(self, f):
114 return f in self._fmap
117 return f in self._fmap
115 def rel(self, f):
118 def rel(self, f):
116 return util.pathto(self._root, self._cwd, f)
119 return util.pathto(self._root, self._cwd, f)
117 def files(self):
120 def files(self):
118 return self._files
121 return self._files
119 def anypats(self):
122 def anypats(self):
120 return self._anypats
123 return self._anypats
121 def always(self):
124 def always(self):
122 return False
125 return False
123
126
124 class exact(match):
127 class exact(match):
125 def __init__(self, root, cwd, files):
128 def __init__(self, root, cwd, files):
126 match.__init__(self, root, cwd, files, exact = True)
129 match.__init__(self, root, cwd, files, exact = True)
127
130
128 class always(match):
131 class always(match):
129 def __init__(self, root, cwd):
132 def __init__(self, root, cwd):
130 match.__init__(self, root, cwd, [])
133 match.__init__(self, root, cwd, [])
131 def always(self):
134 def always(self):
132 return True
135 return True
133
136
134 class narrowmatcher(match):
137 class narrowmatcher(match):
135 """Adapt a matcher to work on a subdirectory only.
138 """Adapt a matcher to work on a subdirectory only.
136
139
137 The paths are remapped to remove/insert the path as needed:
140 The paths are remapped to remove/insert the path as needed:
138
141
139 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
142 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
140 >>> m2 = narrowmatcher('sub', m1)
143 >>> m2 = narrowmatcher('sub', m1)
141 >>> bool(m2('a.txt'))
144 >>> bool(m2('a.txt'))
142 False
145 False
143 >>> bool(m2('b.txt'))
146 >>> bool(m2('b.txt'))
144 True
147 True
145 >>> bool(m2.matchfn('a.txt'))
148 >>> bool(m2.matchfn('a.txt'))
146 False
149 False
147 >>> bool(m2.matchfn('b.txt'))
150 >>> bool(m2.matchfn('b.txt'))
148 True
151 True
149 >>> m2.files()
152 >>> m2.files()
150 ['b.txt']
153 ['b.txt']
151 >>> m2.exact('b.txt')
154 >>> m2.exact('b.txt')
152 True
155 True
153 >>> m2.rel('b.txt')
156 >>> m2.rel('b.txt')
154 'b.txt'
157 'b.txt'
155 >>> def bad(f, msg):
158 >>> def bad(f, msg):
156 ... print "%s: %s" % (f, msg)
159 ... print "%s: %s" % (f, msg)
157 >>> m1.bad = bad
160 >>> m1.bad = bad
158 >>> m2.bad('x.txt', 'No such file')
161 >>> m2.bad('x.txt', 'No such file')
159 sub/x.txt: No such file
162 sub/x.txt: No such file
160 """
163 """
161
164
162 def __init__(self, path, matcher):
165 def __init__(self, path, matcher):
163 self._root = matcher._root
166 self._root = matcher._root
164 self._cwd = matcher._cwd
167 self._cwd = matcher._cwd
165 self._path = path
168 self._path = path
166 self._matcher = matcher
169 self._matcher = matcher
167
170
168 self._files = [f[len(path) + 1:] for f in matcher._files
171 self._files = [f[len(path) + 1:] for f in matcher._files
169 if f.startswith(path + "/")]
172 if f.startswith(path + "/")]
170 self._anypats = matcher._anypats
173 self._anypats = matcher._anypats
171 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
174 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
172 self._fmap = set(self._files)
175 self._fmap = set(self._files)
173
176
174 def bad(self, f, msg):
177 def bad(self, f, msg):
175 self._matcher.bad(self._path + "/" + f, msg)
178 self._matcher.bad(self._path + "/" + f, msg)
176
179
177 def patkind(pat):
180 def patkind(pat):
178 return _patsplit(pat, None)[0]
181 return _patsplit(pat, None)[0]
179
182
180 def _patsplit(pat, default):
183 def _patsplit(pat, default):
181 """Split a string into an optional pattern kind prefix and the
184 """Split a string into an optional pattern kind prefix and the
182 actual pattern."""
185 actual pattern."""
183 if ':' in pat:
186 if ':' in pat:
184 kind, val = pat.split(':', 1)
187 kind, val = pat.split(':', 1)
185 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
188 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
186 'listfile', 'listfile0', 'set'):
189 'listfile', 'listfile0', 'set'):
187 return kind, val
190 return kind, val
188 return default, pat
191 return default, pat
189
192
190 def _globre(pat):
193 def _globre(pat):
191 "convert a glob pattern into a regexp"
194 "convert a glob pattern into a regexp"
192 i, n = 0, len(pat)
195 i, n = 0, len(pat)
193 res = ''
196 res = ''
194 group = 0
197 group = 0
195 escape = re.escape
198 escape = re.escape
196 def peek():
199 def peek():
197 return i < n and pat[i]
200 return i < n and pat[i]
198 while i < n:
201 while i < n:
199 c = pat[i]
202 c = pat[i]
200 i += 1
203 i += 1
201 if c not in '*?[{},\\':
204 if c not in '*?[{},\\':
202 res += escape(c)
205 res += escape(c)
203 elif c == '*':
206 elif c == '*':
204 if peek() == '*':
207 if peek() == '*':
205 i += 1
208 i += 1
206 res += '.*'
209 res += '.*'
207 else:
210 else:
208 res += '[^/]*'
211 res += '[^/]*'
209 elif c == '?':
212 elif c == '?':
210 res += '.'
213 res += '.'
211 elif c == '[':
214 elif c == '[':
212 j = i
215 j = i
213 if j < n and pat[j] in '!]':
216 if j < n and pat[j] in '!]':
214 j += 1
217 j += 1
215 while j < n and pat[j] != ']':
218 while j < n and pat[j] != ']':
216 j += 1
219 j += 1
217 if j >= n:
220 if j >= n:
218 res += '\\['
221 res += '\\['
219 else:
222 else:
220 stuff = pat[i:j].replace('\\','\\\\')
223 stuff = pat[i:j].replace('\\','\\\\')
221 i = j + 1
224 i = j + 1
222 if stuff[0] == '!':
225 if stuff[0] == '!':
223 stuff = '^' + stuff[1:]
226 stuff = '^' + stuff[1:]
224 elif stuff[0] == '^':
227 elif stuff[0] == '^':
225 stuff = '\\' + stuff
228 stuff = '\\' + stuff
226 res = '%s[%s]' % (res, stuff)
229 res = '%s[%s]' % (res, stuff)
227 elif c == '{':
230 elif c == '{':
228 group += 1
231 group += 1
229 res += '(?:'
232 res += '(?:'
230 elif c == '}' and group:
233 elif c == '}' and group:
231 res += ')'
234 res += ')'
232 group -= 1
235 group -= 1
233 elif c == ',' and group:
236 elif c == ',' and group:
234 res += '|'
237 res += '|'
235 elif c == '\\':
238 elif c == '\\':
236 p = peek()
239 p = peek()
237 if p:
240 if p:
238 i += 1
241 i += 1
239 res += escape(p)
242 res += escape(p)
240 else:
243 else:
241 res += escape(c)
244 res += escape(c)
242 else:
245 else:
243 res += escape(c)
246 res += escape(c)
244 return res
247 return res
245
248
246 def _regex(kind, name, tail):
249 def _regex(kind, name, tail):
247 '''convert a pattern into a regular expression'''
250 '''convert a pattern into a regular expression'''
248 if not name:
251 if not name:
249 return ''
252 return ''
250 if kind == 're':
253 if kind == 're':
251 return name
254 return name
252 elif kind == 'path':
255 elif kind == 'path':
253 return '^' + re.escape(name) + '(?:/|$)'
256 return '^' + re.escape(name) + '(?:/|$)'
254 elif kind == 'relglob':
257 elif kind == 'relglob':
255 return '(?:|.*/)' + _globre(name) + tail
258 return '(?:|.*/)' + _globre(name) + tail
256 elif kind == 'relpath':
259 elif kind == 'relpath':
257 return re.escape(name) + '(?:/|$)'
260 return re.escape(name) + '(?:/|$)'
258 elif kind == 'relre':
261 elif kind == 'relre':
259 if name.startswith('^'):
262 if name.startswith('^'):
260 return name
263 return name
261 return '.*' + name
264 return '.*' + name
262 return _globre(name) + tail
265 return _globre(name) + tail
263
266
264 def _buildmatch(ctx, pats, tail):
267 def _buildmatch(ctx, pats, tail):
265 fset, pats = _expandsets(pats, ctx)
268 fset, pats = _expandsets(pats, ctx)
266 if not pats:
269 if not pats:
267 return "", fset.__contains__
270 return "", fset.__contains__
268
271
269 pat, mf = _buildregexmatch(pats, tail)
272 pat, mf = _buildregexmatch(pats, tail)
270 if fset:
273 if fset:
271 return pat, lambda f: f in fset or mf(f)
274 return pat, lambda f: f in fset or mf(f)
272 return pat, mf
275 return pat, mf
273
276
274 def _buildregexmatch(pats, tail):
277 def _buildregexmatch(pats, tail):
275 """build a matching function from a set of patterns"""
278 """build a matching function from a set of patterns"""
276 try:
279 try:
277 pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
280 pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
278 if len(pat) > 20000:
281 if len(pat) > 20000:
279 raise OverflowError
282 raise OverflowError
280 return pat, re.compile(pat).match
283 return pat, re.compile(pat).match
281 except OverflowError:
284 except OverflowError:
282 # We're using a Python with a tiny regex engine and we
285 # We're using a Python with a tiny regex engine and we
283 # made it explode, so we'll divide the pattern list in two
286 # made it explode, so we'll divide the pattern list in two
284 # until it works
287 # until it works
285 l = len(pats)
288 l = len(pats)
286 if l < 2:
289 if l < 2:
287 raise
290 raise
288 pata, a = _buildregexmatch(pats[:l//2], tail)
291 pata, a = _buildregexmatch(pats[:l//2], tail)
289 patb, b = _buildregexmatch(pats[l//2:], tail)
292 patb, b = _buildregexmatch(pats[l//2:], tail)
290 return pat, lambda s: a(s) or b(s)
293 return pat, lambda s: a(s) or b(s)
291 except re.error:
294 except re.error:
292 for k, p in pats:
295 for k, p in pats:
293 try:
296 try:
294 re.compile('(?:%s)' % _regex(k, p, tail))
297 re.compile('(?:%s)' % _regex(k, p, tail))
295 except re.error:
298 except re.error:
296 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
299 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
297 raise util.Abort(_("invalid pattern"))
300 raise util.Abort(_("invalid pattern"))
298
301
299 def _normalize(names, default, root, cwd, auditor):
302 def _normalize(names, default, root, cwd, auditor):
300 pats = []
303 pats = []
301 for kind, name in [_patsplit(p, default) for p in names]:
304 for kind, name in [_patsplit(p, default) for p in names]:
302 if kind in ('glob', 'relpath'):
305 if kind in ('glob', 'relpath'):
303 name = scmutil.canonpath(root, cwd, name, auditor)
306 name = scmutil.canonpath(root, cwd, name, auditor)
304 elif kind in ('relglob', 'path'):
307 elif kind in ('relglob', 'path'):
305 name = util.normpath(name)
308 name = util.normpath(name)
306 elif kind in ('listfile', 'listfile0'):
309 elif kind in ('listfile', 'listfile0'):
307 try:
310 try:
308 files = util.readfile(name)
311 files = util.readfile(name)
309 if kind == 'listfile0':
312 if kind == 'listfile0':
310 files = files.split('\0')
313 files = files.split('\0')
311 else:
314 else:
312 files = files.splitlines()
315 files = files.splitlines()
313 files = [f for f in files if f]
316 files = [f for f in files if f]
314 except EnvironmentError:
317 except EnvironmentError:
315 raise util.Abort(_("unable to read file list (%s)") % name)
318 raise util.Abort(_("unable to read file list (%s)") % name)
316 pats += _normalize(files, default, root, cwd, auditor)
319 pats += _normalize(files, default, root, cwd, auditor)
317 continue
320 continue
318
321
319 pats.append((kind, name))
322 pats.append((kind, name))
320 return pats
323 return pats
321
324
322 def _roots(patterns):
325 def _roots(patterns):
323 r = []
326 r = []
324 for kind, name in patterns:
327 for kind, name in patterns:
325 if kind == 'glob': # find the non-glob prefix
328 if kind == 'glob': # find the non-glob prefix
326 root = []
329 root = []
327 for p in name.split('/'):
330 for p in name.split('/'):
328 if '[' in p or '{' in p or '*' in p or '?' in p:
331 if '[' in p or '{' in p or '*' in p or '?' in p:
329 break
332 break
330 root.append(p)
333 root.append(p)
331 r.append('/'.join(root) or '.')
334 r.append('/'.join(root) or '.')
332 elif kind in ('relpath', 'path'):
335 elif kind in ('relpath', 'path'):
333 r.append(name or '.')
336 r.append(name or '.')
334 elif kind == 'relglob':
337 elif kind == 'relglob':
335 r.append('.')
338 r.append('.')
336 return r
339 return r
337
340
338 def _anypats(patterns):
341 def _anypats(patterns):
339 for kind, name in patterns:
342 for kind, name in patterns:
340 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
343 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
341 return True
344 return True
scmutil.py
@@ -1,887 +1,889 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil, revset, similar, encoding
9 import util, error, osutil, revset, similar, encoding
10 import match as matchmod
10 import match as matchmod
11 import os, errno, re, stat, sys, glob
11 import os, errno, re, stat, sys, glob
12
12
13 def nochangesfound(ui, secretlist=None):
13 def nochangesfound(ui, secretlist=None):
14 '''report no changes for push/pull'''
14 '''report no changes for push/pull'''
15 if secretlist:
15 if secretlist:
16 ui.status(_("no changes found (ignored %d secret changesets)\n")
16 ui.status(_("no changes found (ignored %d secret changesets)\n")
17 % len(secretlist))
17 % len(secretlist))
18 else:
18 else:
19 ui.status(_("no changes found\n"))
19 ui.status(_("no changes found\n"))
20
20
21 def checkfilename(f):
21 def checkfilename(f):
22 '''Check that the filename f is an acceptable filename for a tracked file'''
22 '''Check that the filename f is an acceptable filename for a tracked file'''
23 if '\r' in f or '\n' in f:
23 if '\r' in f or '\n' in f:
24 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
24 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
25
25
26 def checkportable(ui, f):
26 def checkportable(ui, f):
27 '''Check if filename f is portable and warn or abort depending on config'''
27 '''Check if filename f is portable and warn or abort depending on config'''
28 checkfilename(f)
28 checkfilename(f)
29 abort, warn = checkportabilityalert(ui)
29 abort, warn = checkportabilityalert(ui)
30 if abort or warn:
30 if abort or warn:
31 msg = util.checkwinfilename(f)
31 msg = util.checkwinfilename(f)
32 if msg:
32 if msg:
33 msg = "%s: %r" % (msg, f)
33 msg = "%s: %r" % (msg, f)
34 if abort:
34 if abort:
35 raise util.Abort(msg)
35 raise util.Abort(msg)
36 ui.warn(_("warning: %s\n") % msg)
36 ui.warn(_("warning: %s\n") % msg)
37
37
38 def checkportabilityalert(ui):
38 def checkportabilityalert(ui):
39 '''check if the user's config requests nothing, a warning, or abort for
39 '''check if the user's config requests nothing, a warning, or abort for
40 non-portable filenames'''
40 non-portable filenames'''
41 val = ui.config('ui', 'portablefilenames', 'warn')
41 val = ui.config('ui', 'portablefilenames', 'warn')
42 lval = val.lower()
42 lval = val.lower()
43 bval = util.parsebool(val)
43 bval = util.parsebool(val)
44 abort = os.name == 'nt' or lval == 'abort'
44 abort = os.name == 'nt' or lval == 'abort'
45 warn = bval or lval == 'warn'
45 warn = bval or lval == 'warn'
46 if bval is None and not (warn or abort or lval == 'ignore'):
46 if bval is None and not (warn or abort or lval == 'ignore'):
47 raise error.ConfigError(
47 raise error.ConfigError(
48 _("ui.portablefilenames value is invalid ('%s')") % val)
48 _("ui.portablefilenames value is invalid ('%s')") % val)
49 return abort, warn
49 return abort, warn
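Reviewer's summary of the resulting (abort, warn) pair, derived from the code above
(on Windows, abort is forced to True regardless of the setting):

    ui.portablefilenames = 'abort'   -> (True,  False)
    ui.portablefilenames = 'warn'    -> (False, True)    # also the default
    ui.portablefilenames = 'ignore'  -> (False, False)
    anything else that parsebool() cannot interpret raises ConfigError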
50
50
51 class casecollisionauditor(object):
51 class casecollisionauditor(object):
52 def __init__(self, ui, abort, existingiter):
52 def __init__(self, ui, abort, existingiter):
53 self._ui = ui
53 self._ui = ui
54 self._abort = abort
54 self._abort = abort
55 self._map = {}
55 self._map = {}
56 for f in existingiter:
56 for f in existingiter:
57 self._map[encoding.lower(f)] = f
57 self._map[encoding.lower(f)] = f
58
58
59 def __call__(self, f):
59 def __call__(self, f):
60 fl = encoding.lower(f)
60 fl = encoding.lower(f)
61 map = self._map
61 map = self._map
62 if fl in map and map[fl] != f:
62 if fl in map and map[fl] != f:
63 msg = _('possible case-folding collision for %s') % f
63 msg = _('possible case-folding collision for %s') % f
64 if self._abort:
64 if self._abort:
65 raise util.Abort(msg)
65 raise util.Abort(msg)
66 self._ui.warn(_("warning: %s\n") % msg)
66 self._ui.warn(_("warning: %s\n") % msg)
67 map[fl] = f
67 map[fl] = f
68
68
69 class pathauditor(object):
69 class pathauditor(object):
70 '''ensure that a filesystem path contains no banned components.
70 '''ensure that a filesystem path contains no banned components.
71 the following properties of a path are checked:
71 the following properties of a path are checked:
72
72
73 - ends with a directory separator
73 - ends with a directory separator
74 - under top-level .hg
74 - under top-level .hg
75 - starts at the root of a windows drive
75 - starts at the root of a windows drive
76 - contains ".."
76 - contains ".."
77 - traverses a symlink (e.g. a/symlink_here/b)
77 - traverses a symlink (e.g. a/symlink_here/b)
78 - inside a nested repository (a callback can be used to approve
78 - inside a nested repository (a callback can be used to approve
79 some nested repositories, e.g., subrepositories)
79 some nested repositories, e.g., subrepositories)
80 '''
80 '''
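Reviewer's sketch of the checks listed above (hypothetical repository root, not part
of the changeset):

    audit = pathauditor('/repo')
    audit('src/module.py')     # passes
    audit('.hg/store/data')    # aborts: component under the top-level .hg
    audit('../escape.txt')     # aborts: contains ".."
    audit('dir/')              # aborts: ends with a directory separator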
81
81
82 def __init__(self, root, callback=None):
82 def __init__(self, root, callback=None):
83 self.audited = set()
83 self.audited = set()
84 self.auditeddir = set()
84 self.auditeddir = set()
85 self.root = root
85 self.root = root
86 self.callback = callback
86 self.callback = callback
87 if os.path.lexists(root) and not util.checkcase(root):
87 if os.path.lexists(root) and not util.checkcase(root):
88 self.normcase = util.normcase
88 self.normcase = util.normcase
89 else:
89 else:
90 self.normcase = lambda x: x
90 self.normcase = lambda x: x
91
91
92 def __call__(self, path):
92 def __call__(self, path):
93 '''Check the relative path.
93 '''Check the relative path.
94 path may contain a pattern (e.g. foodir/**.txt)'''
94 path may contain a pattern (e.g. foodir/**.txt)'''
95
95
96 path = util.localpath(path)
96 path = util.localpath(path)
97 normpath = self.normcase(path)
97 normpath = self.normcase(path)
98 if normpath in self.audited:
98 if normpath in self.audited:
99 return
99 return
100 # AIX ignores "/" at end of path, others raise EISDIR.
100 # AIX ignores "/" at end of path, others raise EISDIR.
101 if util.endswithsep(path):
101 if util.endswithsep(path):
102 raise util.Abort(_("path ends in directory separator: %s") % path)
102 raise util.Abort(_("path ends in directory separator: %s") % path)
103 parts = util.splitpath(path)
103 parts = util.splitpath(path)
104 if (os.path.splitdrive(path)[0]
104 if (os.path.splitdrive(path)[0]
105 or parts[0].lower() in ('.hg', '.hg.', '')
105 or parts[0].lower() in ('.hg', '.hg.', '')
106 or os.pardir in parts):
106 or os.pardir in parts):
107 raise util.Abort(_("path contains illegal component: %s") % path)
107 raise util.Abort(_("path contains illegal component: %s") % path)
108 if '.hg' in path.lower():
108 if '.hg' in path.lower():
109 lparts = [p.lower() for p in parts]
109 lparts = [p.lower() for p in parts]
110 for p in '.hg', '.hg.':
110 for p in '.hg', '.hg.':
111 if p in lparts[1:]:
111 if p in lparts[1:]:
112 pos = lparts.index(p)
112 pos = lparts.index(p)
113 base = os.path.join(*parts[:pos])
113 base = os.path.join(*parts[:pos])
114 raise util.Abort(_("path '%s' is inside nested repo %r")
114 raise util.Abort(_("path '%s' is inside nested repo %r")
115 % (path, base))
115 % (path, base))
116
116
117 normparts = util.splitpath(normpath)
117 normparts = util.splitpath(normpath)
118 assert len(parts) == len(normparts)
118 assert len(parts) == len(normparts)
119
119
120 parts.pop()
120 parts.pop()
121 normparts.pop()
121 normparts.pop()
122 prefixes = []
122 prefixes = []
123 while parts:
123 while parts:
124 prefix = os.sep.join(parts)
124 prefix = os.sep.join(parts)
125 normprefix = os.sep.join(normparts)
125 normprefix = os.sep.join(normparts)
126 if normprefix in self.auditeddir:
126 if normprefix in self.auditeddir:
127 break
127 break
128 curpath = os.path.join(self.root, prefix)
128 curpath = os.path.join(self.root, prefix)
129 try:
129 try:
130 st = os.lstat(curpath)
130 st = os.lstat(curpath)
131 except OSError, err:
131 except OSError, err:
132 # EINVAL can be raised as invalid path syntax under win32.
132 # EINVAL can be raised as invalid path syntax under win32.
133 # They must be ignored for patterns can be checked too.
133 # They must be ignored for patterns can be checked too.
134 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
134 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
135 raise
135 raise
136 else:
136 else:
137 if stat.S_ISLNK(st.st_mode):
137 if stat.S_ISLNK(st.st_mode):
138 raise util.Abort(
138 raise util.Abort(
139 _('path %r traverses symbolic link %r')
139 _('path %r traverses symbolic link %r')
140 % (path, prefix))
140 % (path, prefix))
141 elif (stat.S_ISDIR(st.st_mode) and
141 elif (stat.S_ISDIR(st.st_mode) and
142 os.path.isdir(os.path.join(curpath, '.hg'))):
142 os.path.isdir(os.path.join(curpath, '.hg'))):
143 if not self.callback or not self.callback(curpath):
143 if not self.callback or not self.callback(curpath):
144 raise util.Abort(_("path '%s' is inside nested "
144 raise util.Abort(_("path '%s' is inside nested "
145 "repo %r")
145 "repo %r")
146 % (path, prefix))
146 % (path, prefix))
147 prefixes.append(normprefix)
147 prefixes.append(normprefix)
148 parts.pop()
148 parts.pop()
149 normparts.pop()
149 normparts.pop()
150
150
151 self.audited.add(normpath)
151 self.audited.add(normpath)
152 # only add prefixes to the cache after checking everything: we don't
152 # only add prefixes to the cache after checking everything: we don't
153 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
153 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
154 self.auditeddir.update(prefixes)
154 self.auditeddir.update(prefixes)
155
155
156 class abstractopener(object):
156 class abstractopener(object):
157 """Abstract base class; cannot be instantiated"""
157 """Abstract base class; cannot be instantiated"""
158
158
159 def __init__(self, *args, **kwargs):
159 def __init__(self, *args, **kwargs):
160 '''Prevent instantiation; don't call this from subclasses.'''
160 '''Prevent instantiation; don't call this from subclasses.'''
161 raise NotImplementedError('attempted instantiating ' + str(type(self)))
161 raise NotImplementedError('attempted instantiating ' + str(type(self)))
162
162
163 def tryread(self, path):
163 def tryread(self, path):
164 '''gracefully return an empty string for missing files'''
164 '''gracefully return an empty string for missing files'''
165 try:
165 try:
166 return self.read(path)
166 return self.read(path)
167 except IOError, inst:
167 except IOError, inst:
168 if inst.errno != errno.ENOENT:
168 if inst.errno != errno.ENOENT:
169 raise
169 raise
170 return ""
170 return ""
171
171
172 def read(self, path):
172 def read(self, path):
173 fp = self(path, 'rb')
173 fp = self(path, 'rb')
174 try:
174 try:
175 return fp.read()
175 return fp.read()
176 finally:
176 finally:
177 fp.close()
177 fp.close()
178
178
179 def write(self, path, data):
179 def write(self, path, data):
180 fp = self(path, 'wb')
180 fp = self(path, 'wb')
181 try:
181 try:
182 return fp.write(data)
182 return fp.write(data)
183 finally:
183 finally:
184 fp.close()
184 fp.close()
185
185
186 def append(self, path, data):
186 def append(self, path, data):
187 fp = self(path, 'ab')
187 fp = self(path, 'ab')
188 try:
188 try:
189 return fp.write(data)
189 return fp.write(data)
190 finally:
190 finally:
191 fp.close()
191 fp.close()
192
192
193 class opener(abstractopener):
193 class opener(abstractopener):
194 '''Open files relative to a base directory
194 '''Open files relative to a base directory
195
195
196 This class is used to hide the details of COW semantics and
196 This class is used to hide the details of COW semantics and
197 remote file access from higher level code.
197 remote file access from higher level code.
198 '''
198 '''
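Reviewer's usage sketch (hypothetical base directory, not part of the changeset):

    op = opener('/repo/.hg/store')
    op.write('data/foo.i', 'contents')   # path is audited, joined under the base,
                                         # and missing directories are created
    op.tryread('no-such-file')           # returns '' instead of raising IOError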
199 def __init__(self, base, audit=True):
199 def __init__(self, base, audit=True):
200 self.base = base
200 self.base = base
201 self._audit = audit
201 self._audit = audit
202 if audit:
202 if audit:
203 self.auditor = pathauditor(base)
203 self.auditor = pathauditor(base)
204 else:
204 else:
205 self.auditor = util.always
205 self.auditor = util.always
206 self.createmode = None
206 self.createmode = None
207 self._trustnlink = None
207 self._trustnlink = None
208
208
209 @util.propertycache
209 @util.propertycache
210 def _cansymlink(self):
210 def _cansymlink(self):
211 return util.checklink(self.base)
211 return util.checklink(self.base)
212
212
213 def _fixfilemode(self, name):
213 def _fixfilemode(self, name):
214 if self.createmode is None:
214 if self.createmode is None:
215 return
215 return
216 os.chmod(name, self.createmode & 0666)
216 os.chmod(name, self.createmode & 0666)
217
217
218 def __call__(self, path, mode="r", text=False, atomictemp=False):
218 def __call__(self, path, mode="r", text=False, atomictemp=False):
219 if self._audit:
219 if self._audit:
220 r = util.checkosfilename(path)
220 r = util.checkosfilename(path)
221 if r:
221 if r:
222 raise util.Abort("%s: %r" % (r, path))
222 raise util.Abort("%s: %r" % (r, path))
223 self.auditor(path)
223 self.auditor(path)
224 f = self.join(path)
224 f = self.join(path)
225
225
226 if not text and "b" not in mode:
226 if not text and "b" not in mode:
227 mode += "b" # for that other OS
227 mode += "b" # for that other OS
228
228
229 nlink = -1
229 nlink = -1
230 dirname, basename = os.path.split(f)
230 dirname, basename = os.path.split(f)
231 # If basename is empty, then the path is malformed because it points
231 # If basename is empty, then the path is malformed because it points
232 # to a directory. Let the posixfile() call below raise IOError.
232 # to a directory. Let the posixfile() call below raise IOError.
233 if basename and mode not in ('r', 'rb'):
233 if basename and mode not in ('r', 'rb'):
234 if atomictemp:
234 if atomictemp:
235 if not os.path.isdir(dirname):
235 if not os.path.isdir(dirname):
236 util.makedirs(dirname, self.createmode)
236 util.makedirs(dirname, self.createmode)
237 return util.atomictempfile(f, mode, self.createmode)
237 return util.atomictempfile(f, mode, self.createmode)
238 try:
238 try:
239 if 'w' in mode:
239 if 'w' in mode:
240 util.unlink(f)
240 util.unlink(f)
241 nlink = 0
241 nlink = 0
242 else:
242 else:
243 # nlinks() may behave differently for files on Windows
243 # nlinks() may behave differently for files on Windows
244 # shares if the file is open.
244 # shares if the file is open.
245 fd = util.posixfile(f)
245 fd = util.posixfile(f)
246 nlink = util.nlinks(f)
246 nlink = util.nlinks(f)
247 if nlink < 1:
247 if nlink < 1:
248 nlink = 2 # force mktempcopy (issue1922)
248 nlink = 2 # force mktempcopy (issue1922)
249 fd.close()
249 fd.close()
250 except (OSError, IOError), e:
250 except (OSError, IOError), e:
251 if e.errno != errno.ENOENT:
251 if e.errno != errno.ENOENT:
252 raise
252 raise
253 nlink = 0
253 nlink = 0
254 if not os.path.isdir(dirname):
254 if not os.path.isdir(dirname):
255 util.makedirs(dirname, self.createmode)
255 util.makedirs(dirname, self.createmode)
256 if nlink > 0:
256 if nlink > 0:
257 if self._trustnlink is None:
257 if self._trustnlink is None:
258 self._trustnlink = nlink > 1 or util.checknlink(f)
258 self._trustnlink = nlink > 1 or util.checknlink(f)
259 if nlink > 1 or not self._trustnlink:
259 if nlink > 1 or not self._trustnlink:
260 util.rename(util.mktempcopy(f), f)
260 util.rename(util.mktempcopy(f), f)
261 fp = util.posixfile(f, mode)
261 fp = util.posixfile(f, mode)
262 if nlink == 0:
262 if nlink == 0:
263 self._fixfilemode(f)
263 self._fixfilemode(f)
264 return fp
264 return fp
265
265
266 def symlink(self, src, dst):
266 def symlink(self, src, dst):
267 self.auditor(dst)
267 self.auditor(dst)
268 linkname = self.join(dst)
268 linkname = self.join(dst)
269 try:
269 try:
270 os.unlink(linkname)
270 os.unlink(linkname)
271 except OSError:
271 except OSError:
272 pass
272 pass
273
273
274 dirname = os.path.dirname(linkname)
274 dirname = os.path.dirname(linkname)
275 if not os.path.exists(dirname):
275 if not os.path.exists(dirname):
276 util.makedirs(dirname, self.createmode)
276 util.makedirs(dirname, self.createmode)
277
277
278 if self._cansymlink:
278 if self._cansymlink:
279 try:
279 try:
280 os.symlink(src, linkname)
280 os.symlink(src, linkname)
281 except OSError, err:
281 except OSError, err:
282 raise OSError(err.errno, _('could not symlink to %r: %s') %
282 raise OSError(err.errno, _('could not symlink to %r: %s') %
283 (src, err.strerror), linkname)
283 (src, err.strerror), linkname)
284 else:
284 else:
285 f = self(dst, "w")
285 f = self(dst, "w")
286 f.write(src)
286 f.write(src)
287 f.close()
287 f.close()
288 self._fixfilemode(dst)
288 self._fixfilemode(dst)
289
289
290 def audit(self, path):
290 def audit(self, path):
291 self.auditor(path)
291 self.auditor(path)
292
292
293 def join(self, path):
293 def join(self, path):
294 return os.path.join(self.base, path)
294 return os.path.join(self.base, path)
295
295
296 class filteropener(abstractopener):
296 class filteropener(abstractopener):
297 '''Wrapper opener for filtering filenames with a function.'''
297 '''Wrapper opener for filtering filenames with a function.'''
298
298
299 def __init__(self, opener, filter):
299 def __init__(self, opener, filter):
300 self._filter = filter
300 self._filter = filter
301 self._orig = opener
301 self._orig = opener
302
302
303 def __call__(self, path, *args, **kwargs):
303 def __call__(self, path, *args, **kwargs):
304 return self._orig(self._filter(path), *args, **kwargs)
304 return self._orig(self._filter(path), *args, **kwargs)
305
305
306 def canonpath(root, cwd, myname, auditor=None):
306 def canonpath(root, cwd, myname, auditor=None):
307 '''return the canonical path of myname, given cwd and root'''
307 '''return the canonical path of myname, given cwd and root'''
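Reviewer's sketch (hypothetical paths, not part of the changeset):

    canonpath('/repo', 'sub', 'x.txt')      # -> 'sub/x.txt'
    canonpath('/repo', '', '/etc/passwd')   # aborts: '/etc/passwd' not under root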
308 if util.endswithsep(root):
308 if util.endswithsep(root):
309 rootsep = root
309 rootsep = root
310 else:
310 else:
311 rootsep = root + os.sep
311 rootsep = root + os.sep
312 name = myname
312 name = myname
313 if not os.path.isabs(name):
313 if not os.path.isabs(name):
314 name = os.path.join(root, cwd, name)
314 name = os.path.join(root, cwd, name)
315 name = os.path.normpath(name)
315 name = os.path.normpath(name)
316 if auditor is None:
316 if auditor is None:
317 auditor = pathauditor(root)
317 auditor = pathauditor(root)
318 if name != rootsep and name.startswith(rootsep):
318 if name != rootsep and name.startswith(rootsep):
319 name = name[len(rootsep):]
319 name = name[len(rootsep):]
320 auditor(name)
320 auditor(name)
321 return util.pconvert(name)
321 return util.pconvert(name)
322 elif name == root:
322 elif name == root:
323 return ''
323 return ''
324 else:
324 else:
325 # Determine whether `name' is in the hierarchy at or beneath `root',
325 # Determine whether `name' is in the hierarchy at or beneath `root',
326 # by iterating name=dirname(name) until that causes no change (can't
326 # by iterating name=dirname(name) until that causes no change (can't
327 # check name == '/', because that doesn't work on windows). For each
327 # check name == '/', because that doesn't work on windows). For each
328 # `name', compare dev/inode numbers. If they match, the list `rel'
328 # `name', compare dev/inode numbers. If they match, the list `rel'
329 # holds the reversed list of components making up the relative file
329 # holds the reversed list of components making up the relative file
330 # name we want.
330 # name we want.
331 root_st = os.stat(root)
331 root_st = os.stat(root)
332 rel = []
332 rel = []
333 while True:
333 while True:
334 try:
334 try:
335 name_st = os.stat(name)
335 name_st = os.stat(name)
336 except OSError:
336 except OSError:
337 name_st = None
337 name_st = None
338 if name_st and util.samestat(name_st, root_st):
338 if name_st and util.samestat(name_st, root_st):
339 if not rel:
339 if not rel:
340 # name was actually the same as root (maybe a symlink)
340 # name was actually the same as root (maybe a symlink)
341 return ''
341 return ''
342 rel.reverse()
342 rel.reverse()
343 name = os.path.join(*rel)
343 name = os.path.join(*rel)
344 auditor(name)
344 auditor(name)
345 return util.pconvert(name)
345 return util.pconvert(name)
346 dirname, basename = os.path.split(name)
346 dirname, basename = os.path.split(name)
347 rel.append(basename)
347 rel.append(basename)
348 if dirname == name:
348 if dirname == name:
349 break
349 break
350 name = dirname
350 name = dirname
351
351
352 raise util.Abort('%s not under root' % myname)
352 raise util.Abort('%s not under root' % myname)
353
353
354 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
354 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
355 '''yield every hg repository under path, recursively.'''
355 '''yield every hg repository under path, recursively.'''
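(Reviewer's note, not part of the changeset: a typical call is "for repopath in walkrepos('/srv/hg', followsym=True): ..." with a hypothetical path; besides ordinary repositories the generator also yields MQ patch-queue repositories found at <repo>/.hg/patches/.hg, as the code below shows.)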
356 def errhandler(err):
356 def errhandler(err):
357 if err.filename == path:
357 if err.filename == path:
358 raise err
358 raise err
359 samestat = getattr(os.path, 'samestat', None)
359 samestat = getattr(os.path, 'samestat', None)
360 if followsym and samestat is not None:
360 if followsym and samestat is not None:
361 def adddir(dirlst, dirname):
361 def adddir(dirlst, dirname):
362 match = False
362 match = False
363 dirstat = os.stat(dirname)
363 dirstat = os.stat(dirname)
364 for lstdirstat in dirlst:
364 for lstdirstat in dirlst:
365 if samestat(dirstat, lstdirstat):
365 if samestat(dirstat, lstdirstat):
366 match = True
366 match = True
367 break
367 break
368 if not match:
368 if not match:
369 dirlst.append(dirstat)
369 dirlst.append(dirstat)
370 return not match
370 return not match
371 else:
371 else:
372 followsym = False
372 followsym = False
373
373
374 if (seen_dirs is None) and followsym:
374 if (seen_dirs is None) and followsym:
375 seen_dirs = []
375 seen_dirs = []
376 adddir(seen_dirs, path)
376 adddir(seen_dirs, path)
377 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
377 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
378 dirs.sort()
378 dirs.sort()
379 if '.hg' in dirs:
379 if '.hg' in dirs:
380 yield root # found a repository
380 yield root # found a repository
381 qroot = os.path.join(root, '.hg', 'patches')
381 qroot = os.path.join(root, '.hg', 'patches')
382 if os.path.isdir(os.path.join(qroot, '.hg')):
382 if os.path.isdir(os.path.join(qroot, '.hg')):
383 yield qroot # we have a patch queue repo here
383 yield qroot # we have a patch queue repo here
384 if recurse:
384 if recurse:
385 # avoid recursing inside the .hg directory
385 # avoid recursing inside the .hg directory
386 dirs.remove('.hg')
386 dirs.remove('.hg')
387 else:
387 else:
388 dirs[:] = [] # don't descend further
388 dirs[:] = [] # don't descend further
389 elif followsym:
389 elif followsym:
390 newdirs = []
390 newdirs = []
391 for d in dirs:
391 for d in dirs:
392 fname = os.path.join(root, d)
392 fname = os.path.join(root, d)
393 if adddir(seen_dirs, fname):
393 if adddir(seen_dirs, fname):
394 if os.path.islink(fname):
394 if os.path.islink(fname):
395 for hgname in walkrepos(fname, True, seen_dirs):
395 for hgname in walkrepos(fname, True, seen_dirs):
396 yield hgname
396 yield hgname
397 else:
397 else:
398 newdirs.append(d)
398 newdirs.append(d)
399 dirs[:] = newdirs
399 dirs[:] = newdirs
400
400
401 def osrcpath():
401 def osrcpath():
402 '''return default os-specific hgrc search path'''
402 '''return default os-specific hgrc search path'''
403 path = systemrcpath()
403 path = systemrcpath()
404 path.extend(userrcpath())
404 path.extend(userrcpath())
405 path = [os.path.normpath(f) for f in path]
405 path = [os.path.normpath(f) for f in path]
406 return path
406 return path
407
407
408 _rcpath = None
408 _rcpath = None
409
409
410 def rcpath():
410 def rcpath():
411 '''return hgrc search path. if env var HGRCPATH is set, use it.
411 '''return hgrc search path. if env var HGRCPATH is set, use it.
412 for each item in path, if directory, use files ending in .rc,
412 for each item in path, if directory, use files ending in .rc,
413 else use item.
413 else use item.
414 make HGRCPATH empty to only look in .hg/hgrc of current repo.
414 make HGRCPATH empty to only look in .hg/hgrc of current repo.
415 if no HGRCPATH, use default os-specific path.'''
415 if no HGRCPATH, use default os-specific path.'''
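Reviewer's illustration of the rules above (hypothetical paths, not part of the
changeset):

    HGRCPATH=/etc/mercurial/conf.d:/home/user/extra.rc
        -> every *.rc file inside /etc/mercurial/conf.d, then /home/user/extra.rc itself
    HGRCPATH=   (set but empty)
        -> nothing from here; only the current repository's .hg/hgrc is read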
416 global _rcpath
416 global _rcpath
417 if _rcpath is None:
417 if _rcpath is None:
418 if 'HGRCPATH' in os.environ:
418 if 'HGRCPATH' in os.environ:
419 _rcpath = []
419 _rcpath = []
420 for p in os.environ['HGRCPATH'].split(os.pathsep):
420 for p in os.environ['HGRCPATH'].split(os.pathsep):
421 if not p:
421 if not p:
422 continue
422 continue
423 p = util.expandpath(p)
423 p = util.expandpath(p)
424 if os.path.isdir(p):
424 if os.path.isdir(p):
425 for f, kind in osutil.listdir(p):
425 for f, kind in osutil.listdir(p):
426 if f.endswith('.rc'):
426 if f.endswith('.rc'):
427 _rcpath.append(os.path.join(p, f))
427 _rcpath.append(os.path.join(p, f))
428 else:
428 else:
429 _rcpath.append(p)
429 _rcpath.append(p)
430 else:
430 else:
431 _rcpath = osrcpath()
431 _rcpath = osrcpath()
432 return _rcpath
432 return _rcpath
433
433
434 if os.name != 'nt':
434 if os.name != 'nt':
435
435
436 def rcfiles(path):
436 def rcfiles(path):
437 rcs = [os.path.join(path, 'hgrc')]
437 rcs = [os.path.join(path, 'hgrc')]
438 rcdir = os.path.join(path, 'hgrc.d')
438 rcdir = os.path.join(path, 'hgrc.d')
439 try:
439 try:
440 rcs.extend([os.path.join(rcdir, f)
440 rcs.extend([os.path.join(rcdir, f)
441 for f, kind in osutil.listdir(rcdir)
441 for f, kind in osutil.listdir(rcdir)
442 if f.endswith(".rc")])
442 if f.endswith(".rc")])
443 except OSError:
443 except OSError:
444 pass
444 pass
445 return rcs
445 return rcs
446
446
447 def systemrcpath():
447 def systemrcpath():
448 path = []
448 path = []
449 if sys.platform == 'plan9':
449 if sys.platform == 'plan9':
450 root = 'lib/mercurial'
450 root = 'lib/mercurial'
451 else:
451 else:
452 root = 'etc/mercurial'
452 root = 'etc/mercurial'
453 # old mod_python does not set sys.argv
453 # old mod_python does not set sys.argv
454 if len(getattr(sys, 'argv', [])) > 0:
454 if len(getattr(sys, 'argv', [])) > 0:
455 p = os.path.dirname(os.path.dirname(sys.argv[0]))
455 p = os.path.dirname(os.path.dirname(sys.argv[0]))
456 path.extend(rcfiles(os.path.join(p, root)))
456 path.extend(rcfiles(os.path.join(p, root)))
457 path.extend(rcfiles('/' + root))
457 path.extend(rcfiles('/' + root))
458 return path
458 return path
459
459
460 def userrcpath():
460 def userrcpath():
461 if sys.platform == 'plan9':
461 if sys.platform == 'plan9':
462 return [os.environ['home'] + '/lib/hgrc']
462 return [os.environ['home'] + '/lib/hgrc']
463 else:
463 else:
464 return [os.path.expanduser('~/.hgrc')]
464 return [os.path.expanduser('~/.hgrc')]
465
465
466 else:
466 else:
467
467
468 _HKEY_LOCAL_MACHINE = 0x80000002L
468 _HKEY_LOCAL_MACHINE = 0x80000002L
469
469
470 def systemrcpath():
470 def systemrcpath():
471 '''return default os-specific hgrc search path'''
471 '''return default os-specific hgrc search path'''
472 rcpath = []
472 rcpath = []
473 filename = util.executablepath()
473 filename = util.executablepath()
474 # Use mercurial.ini found in directory with hg.exe
474 # Use mercurial.ini found in directory with hg.exe
475 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
475 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
476 if os.path.isfile(progrc):
476 if os.path.isfile(progrc):
477 rcpath.append(progrc)
477 rcpath.append(progrc)
478 return rcpath
478 return rcpath
479 # Use hgrc.d found in directory with hg.exe
479 # Use hgrc.d found in directory with hg.exe
480 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
480 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
481 if os.path.isdir(progrcd):
481 if os.path.isdir(progrcd):
482 for f, kind in osutil.listdir(progrcd):
482 for f, kind in osutil.listdir(progrcd):
483 if f.endswith('.rc'):
483 if f.endswith('.rc'):
484 rcpath.append(os.path.join(progrcd, f))
484 rcpath.append(os.path.join(progrcd, f))
485 return rcpath
485 return rcpath
486 # else look for a system rcpath in the registry
486 # else look for a system rcpath in the registry
487 value = util.lookupreg('SOFTWARE\\Mercurial', None,
487 value = util.lookupreg('SOFTWARE\\Mercurial', None,
488 _HKEY_LOCAL_MACHINE)
488 _HKEY_LOCAL_MACHINE)
489 if not isinstance(value, str) or not value:
489 if not isinstance(value, str) or not value:
490 return rcpath
490 return rcpath
491 value = util.localpath(value)
491 value = util.localpath(value)
492 for p in value.split(os.pathsep):
492 for p in value.split(os.pathsep):
493 if p.lower().endswith('mercurial.ini'):
493 if p.lower().endswith('mercurial.ini'):
494 rcpath.append(p)
494 rcpath.append(p)
495 elif os.path.isdir(p):
495 elif os.path.isdir(p):
496 for f, kind in osutil.listdir(p):
496 for f, kind in osutil.listdir(p):
497 if f.endswith('.rc'):
497 if f.endswith('.rc'):
498 rcpath.append(os.path.join(p, f))
498 rcpath.append(os.path.join(p, f))
499 return rcpath
499 return rcpath
500
500
501 def userrcpath():
501 def userrcpath():
502 '''return os-specific hgrc search path to the user dir'''
502 '''return os-specific hgrc search path to the user dir'''
503 home = os.path.expanduser('~')
503 home = os.path.expanduser('~')
504 path = [os.path.join(home, 'mercurial.ini'),
504 path = [os.path.join(home, 'mercurial.ini'),
505 os.path.join(home, '.hgrc')]
505 os.path.join(home, '.hgrc')]
506 userprofile = os.environ.get('USERPROFILE')
506 userprofile = os.environ.get('USERPROFILE')
507 if userprofile:
507 if userprofile:
508 path.append(os.path.join(userprofile, 'mercurial.ini'))
508 path.append(os.path.join(userprofile, 'mercurial.ini'))
509 path.append(os.path.join(userprofile, '.hgrc'))
509 path.append(os.path.join(userprofile, '.hgrc'))
510 return path
510 return path
511
511
512 def revsingle(repo, revspec, default='.'):
512 def revsingle(repo, revspec, default='.'):
513 if not revspec:
513 if not revspec:
514 return repo[default]
514 return repo[default]
515
515
516 l = revrange(repo, [revspec])
516 l = revrange(repo, [revspec])
517 if len(l) < 1:
517 if len(l) < 1:
518 raise util.Abort(_('empty revision set'))
518 raise util.Abort(_('empty revision set'))
519 return repo[l[-1]]
519 return repo[l[-1]]
520
520
521 def revpair(repo, revs):
521 def revpair(repo, revs):
522 if not revs:
522 if not revs:
523 return repo.dirstate.p1(), None
523 return repo.dirstate.p1(), None
524
524
525 l = revrange(repo, revs)
525 l = revrange(repo, revs)
526
526
527 if len(l) == 0:
527 if len(l) == 0:
528 if revs:
529 raise util.Abort(_('empty revision range'))
528 return repo.dirstate.p1(), None
530 return repo.dirstate.p1(), None
529
531
530 if len(l) == 1:
532 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
531 return repo.lookup(l[0]), None
533 return repo.lookup(l[0]), None
532
534
533 return repo.lookup(l[0]), repo.lookup(l[-1])
535 return repo.lookup(l[0]), repo.lookup(l[-1])
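Reviewer note on the revpair() change: when the revs argument is non-empty but resolves to no revisions, the function now aborts with 'empty revision range' instead of silently falling back to the working directory's first parent. The single-revision shortcut is also only taken when exactly one spec was given and it is not an explicit 'a:b' range, so a spec such as '2:2' still yields a (node, node) pair rather than (node, None).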
534
536
535 _revrangesep = ':'
537 _revrangesep = ':'
536
538
537 def revrange(repo, revs):
539 def revrange(repo, revs):
538 """Yield revision as strings from a list of revision specifications."""
540 """Yield revision as strings from a list of revision specifications."""
539
541
540 def revfix(repo, val, defval):
542 def revfix(repo, val, defval):
541 if not val and val != 0 and defval is not None:
543 if not val and val != 0 and defval is not None:
542 return defval
544 return defval
543 return repo[val].rev()
545 return repo[val].rev()
544
546
545 seen, l = set(), []
547 seen, l = set(), []
546 for spec in revs:
548 for spec in revs:
547 if l and not seen:
549 if l and not seen:
548 seen = set(l)
550 seen = set(l)
549 # attempt to parse old-style ranges first to deal with
551 # attempt to parse old-style ranges first to deal with
550 # things like old-tag which contain query metacharacters
552 # things like old-tag which contain query metacharacters
551 try:
553 try:
552 if isinstance(spec, int):
554 if isinstance(spec, int):
553 seen.add(spec)
555 seen.add(spec)
554 l.append(spec)
556 l.append(spec)
555 continue
557 continue
556
558
557 if _revrangesep in spec:
559 if _revrangesep in spec:
558 start, end = spec.split(_revrangesep, 1)
560 start, end = spec.split(_revrangesep, 1)
559 start = revfix(repo, start, 0)
561 start = revfix(repo, start, 0)
560 end = revfix(repo, end, len(repo) - 1)
562 end = revfix(repo, end, len(repo) - 1)
561 step = start > end and -1 or 1
563 step = start > end and -1 or 1
562 if not seen and not l:
564 if not seen and not l:
563 # by far the most common case: revs = ["-1:0"]
565 # by far the most common case: revs = ["-1:0"]
564 l = range(start, end + step, step)
566 l = range(start, end + step, step)
565 # defer syncing seen until next iteration
567 # defer syncing seen until next iteration
566 continue
568 continue
567 newrevs = set(xrange(start, end + step, step))
569 newrevs = set(xrange(start, end + step, step))
568 if seen:
570 if seen:
569 newrevs.difference_update(seen)
571 newrevs.difference_update(seen)
570 seen.union(newrevs)
572 seen.union(newrevs)
571 else:
573 else:
572 seen = newrevs
574 seen = newrevs
573 l.extend(sorted(newrevs, reverse=start > end))
575 l.extend(sorted(newrevs, reverse=start > end))
574 continue
576 continue
575 elif spec and spec in repo: # single unquoted rev
577 elif spec and spec in repo: # single unquoted rev
576 rev = revfix(repo, spec, None)
578 rev = revfix(repo, spec, None)
577 if rev in seen:
579 if rev in seen:
578 continue
580 continue
579 seen.add(rev)
581 seen.add(rev)
580 l.append(rev)
582 l.append(rev)
581 continue
583 continue
582 except error.RepoLookupError:
584 except error.RepoLookupError:
583 pass
585 pass
584
586
585 # fall through to new-style queries if old-style fails
587 # fall through to new-style queries if old-style fails
586 m = revset.match(repo.ui, spec)
588 m = revset.match(repo.ui, spec)
587 for r in m(repo, range(len(repo))):
589 for r in m(repo, range(len(repo))):
588 if r not in seen:
590 if r not in seen:
589 l.append(r)
591 l.append(r)
590 seen.update(l)
592 seen.update(l)
591
593
592 return l
594 return l
593
595
594 def expandpats(pats):
596 def expandpats(pats):
595 if not util.expandglobs:
597 if not util.expandglobs:
596 return list(pats)
598 return list(pats)
597 ret = []
599 ret = []
598 for p in pats:
600 for p in pats:
599 kind, name = matchmod._patsplit(p, None)
601 kind, name = matchmod._patsplit(p, None)
600 if kind is None:
602 if kind is None:
601 try:
603 try:
602 globbed = glob.glob(name)
604 globbed = glob.glob(name)
603 except re.error:
605 except re.error:
604 globbed = [name]
606 globbed = [name]
605 if globbed:
607 if globbed:
606 ret.extend(globbed)
608 ret.extend(globbed)
607 continue
609 continue
608 ret.append(p)
610 ret.append(p)
609 return ret
611 return ret
610
612
611 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
613 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
612 if pats == ("",):
614 if pats == ("",):
613 pats = []
615 pats = []
614 if not globbed and default == 'relpath':
616 if not globbed and default == 'relpath':
615 pats = expandpats(pats or [])
617 pats = expandpats(pats or [])
616
618
617 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
619 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
618 default)
620 default)
619 def badfn(f, msg):
621 def badfn(f, msg):
620 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
622 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
621 m.bad = badfn
623 m.bad = badfn
622 return m, pats
624 return m, pats
623
625
624 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
626 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
625 return matchandpats(ctx, pats, opts, globbed, default)[0]
627 return matchandpats(ctx, pats, opts, globbed, default)[0]
626
628
627 def matchall(repo):
629 def matchall(repo):
628 return matchmod.always(repo.root, repo.getcwd())
630 return matchmod.always(repo.root, repo.getcwd())
629
631
630 def matchfiles(repo, files):
632 def matchfiles(repo, files):
631 return matchmod.exact(repo.root, repo.getcwd(), files)
633 return matchmod.exact(repo.root, repo.getcwd(), files)
632
634
def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
    if dry_run is None:
        dry_run = opts.get('dry_run')
    if similarity is None:
        similarity = float(opts.get('similarity') or 0)
    # we'd use status here, except handling of symlinks and ignore is tricky
    added, unknown, deleted, removed = [], [], [], []
    audit_path = pathauditor(repo.root)
    m = match(repo[None], pats, opts)
    rejected = []
    m.bad = lambda x, y: rejected.append(x)

    for abs in repo.walk(m):
        target = repo.wjoin(abs)
        good = True
        try:
            audit_path(abs)
        except (OSError, util.Abort):
            good = False
        rel = m.rel(abs)
        exact = m.exact(abs)
        if good and abs not in repo.dirstate:
            unknown.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
        elif (repo.dirstate[abs] != 'r' and
              (not good or not os.path.lexists(target) or
               (os.path.isdir(target) and not os.path.islink(target)))):
            deleted.append(abs)
            if repo.ui.verbose or not exact:
                repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
        # for finding renames
        elif repo.dirstate[abs] == 'r':
            removed.append(abs)
        elif repo.dirstate[abs] == 'a':
            added.append(abs)
    copies = {}
    if similarity > 0:
        for old, new, score in similar.findrenames(repo,
                added + unknown, removed + deleted, similarity):
            if repo.ui.verbose or not m.exact(old) or not m.exact(new):
                repo.ui.status(_('recording removal of %s as rename to %s '
                                 '(%d%% similar)\n') %
                               (m.rel(old), m.rel(new), score * 100))
            copies[new] = old

    if not dry_run:
        wctx = repo[None]
        wlock = repo.wlock()
        try:
            wctx.forget(deleted)
            wctx.add(unknown)
            for new, old in copies.iteritems():
                wctx.copy(old, new)
        finally:
            wlock.release()

    for f in rejected:
        if f in m.files():
            return 1
    return 0

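# Illustrative note (not part of this changeset): addremove() is the engine
# behind 'hg addremove'.  A hypothetical caller would pass command-line
# patterns and options straight through, e.g.:
#
#     # schedule unknown files for addition and missing files for removal,
#     # recording renames for pairs that are at least 60% similar
#     rc = addremove(repo, pats=['src/'], opts={}, similarity=60)
#
# The return value is 0 on success, or 1 if a file named explicitly by the
# patterns was rejected by the walk (collected through the m.bad callback).
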
def updatedir(ui, repo, patches, similarity=0):
    '''Update dirstate after patch application according to metadata'''
    if not patches:
        return []
    copies = []
    removes = set()
    cfiles = patches.keys()
    cwd = repo.getcwd()
    if cwd:
        cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
    for f in patches:
        gp = patches[f]
        if not gp:
            continue
        if gp.op == 'RENAME':
            copies.append((gp.oldpath, gp.path))
            removes.add(gp.oldpath)
        elif gp.op == 'COPY':
            copies.append((gp.oldpath, gp.path))
        elif gp.op == 'DELETE':
            removes.add(gp.path)

    wctx = repo[None]
    for src, dst in copies:
        dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
    if (not similarity) and removes:
        wctx.remove(sorted(removes), True)

    for f in patches:
        gp = patches[f]
        if gp and gp.mode:
            islink, isexec = gp.mode
            dst = repo.wjoin(gp.path)
            # patch won't create empty files
            if gp.op == 'ADD' and not os.path.lexists(dst):
                flags = (isexec and 'x' or '') + (islink and 'l' or '')
                repo.wwrite(gp.path, '', flags)
            util.setflags(dst, islink, isexec)
    addremove(repo, cfiles, similarity=similarity)
    files = patches.keys()
    files.extend([r for r in removes if r not in files])
    return sorted(files)

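# Illustrative note (not part of this changeset): 'patches' maps each file
# name to its git-patch metadata object, or None when there is none.  Judging
# only from the attributes used above, a hypothetical entry for a rename
# would carry:
#
#     gp.op      = 'RENAME'           # also 'COPY', 'DELETE' or 'ADD'
#     gp.oldpath = 'old/name.c'       # source of a rename or copy
#     gp.path    = 'new/name.c'       # file the operation applies to
#     gp.mode    = (islink, isexec)   # e.g. (False, True) for an executable
#
# updatedir() replays that metadata into the dirstate and returns the sorted
# list of affected file names.
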
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)

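# Illustrative note (not part of this changeset): a hypothetical caller that
# wants b.txt remembered as a copy of a.txt in the working directory would do
#
#     dirstatecopy(ui, repo, repo[None], 'a.txt', 'b.txt')
#
# As the branches above show, no copy is recorded when dst merely restores the
# original source ("copying back a copy") or when the source is itself an
# uncommitted add; in the latter case only a warning is printed and dst is
# added if needed.
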
def readrequires(opener, supported):
    '''Reads and parses .hg/requires and checks if all entries found
    are in the list of supported features.'''
    requirements = set(opener.read("requires").splitlines())
    missings = []
    for r in requirements:
        if r not in supported:
            if not r or not r[0].isalnum():
                raise error.RequirementError(_(".hg/requires file is corrupt"))
            missings.append(r)
    missings.sort()
    if missings:
        raise error.RequirementError(
            _("unknown repository format: requires features '%s' (upgrade "
              "Mercurial)") % "', '".join(missings))
    return requirements

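# Illustrative sketch (not part of this changeset): the same check written
# against a plain directory path instead of an opener.  Feature names such as
# 'revlogv1', 'store' and 'fncache' are examples of typical requirements
# entries.
#
#     def checkrequires(hgdir, supported):
#         fp = open(os.path.join(hgdir, 'requires'))
#         try:
#             requirements = set(fp.read().splitlines())
#         finally:
#             fp.close()
#         missing = sorted(r for r in requirements if r not in supported)
#         if missing:
#             raise RuntimeError('unsupported features: %s' % ', '.join(missing))
#         return requirements
#
#     checkrequires('/path/to/repo/.hg', set(['revlogv1', 'store', 'fncache']))
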
class filecacheentry(object):
    def __init__(self, path):
        self.path = path
        self.cachestat = filecacheentry.stat(self.path)

        if self.cachestat:
            self._cacheable = self.cachestat.cacheable()
        else:
            # None means we don't know yet
            self._cacheable = None

    def refresh(self):
        if self.cacheable():
            self.cachestat = filecacheentry.stat(self.path)

    def cacheable(self):
        if self._cacheable is not None:
            return self._cacheable

        # we don't know yet, assume it is for now
        return True

    def changed(self):
        # no point in going further if we can't cache it
        if not self.cacheable():
            return True

        newstat = filecacheentry.stat(self.path)

        # we may not know if it's cacheable yet, check again now
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        # check again
        if not self._cacheable:
            return True

        if self.cachestat != newstat:
            self.cachestat = newstat
            return True
        else:
            return False

    @staticmethod
    def stat(path):
        try:
            return util.cachestat(path)
        except OSError, e:
            if e.errno != errno.ENOENT:
                raise

class filecache(object):
    '''A property-like decorator that tracks a file under .hg/ for updates.

    Records stat info when called in _filecache.

    On subsequent calls, compares old stat info with new info, and recreates
    the object when needed, updating the new stat info in _filecache.

    Mercurial uses either atomic renames or appends for files under .hg,
    so to ensure the cache is reliable we need the filesystem to be able
    to tell us if a file has been replaced. If it can't, we fall back to
    recreating the object on every call (essentially the same behaviour as
    propertycache).'''
    def __init__(self, path):
        self.path = path

    def join(self, obj, fname):
        """Used to compute the runtime path of the cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class whose member function was decorated).
        """
        return obj.join(fname)

    def __call__(self, func):
        self.func = func
        self.name = func.__name__
        return self

    def __get__(self, obj, type=None):
        # do we need to check if the file changed?
        if self.name in obj.__dict__:
            return obj.__dict__[self.name]

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            path = self.join(obj, self.path)

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified the file between the time we read and stat it
            entry = filecacheentry(path)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.name] = entry.obj
        return entry.obj

    def __set__(self, obj, value):
        if self.name in obj._filecache:
            obj._filecache[self.name].obj = value # update cached copy
        obj.__dict__[self.name] = value # update copy returned by obj.x

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.name]
        except KeyError:
            raise AttributeError, self.name
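
# Illustrative example (not part of scmutil.py or this changeset): a minimal
# sketch of how a class would plug into the filecache decorator above.  The
# names 'examplecache', 'examplerepo' and the tracked 'config' file are
# hypothetical; 'os' is already imported at the top of this module.

class examplecache(filecache):
    # resolve the tracked file name relative to the owning object's directory
    def join(self, obj, fname):
        return os.path.join(obj.path, fname)

class examplerepo(object):
    def __init__(self, path):
        self.path = path
        self._filecache = {}    # filecache stores its filecacheentry objects here

    @examplecache('config')
    def config(self):
        # computed once and then cached on the instance; the stat info kept in
        # _filecache lets the owner later detect that 'config' changed on disk
        return open(os.path.join(self.path, 'config')).read()

# Reading examplerepo('/some/dir').config then returns the file's contents,
# statting the file first so a later change can be detected via changed().
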
@@ -1,86 +1,92 b''
Testing diff --change

$ hg init a
$ cd a

$ echo "first" > file.txt
$ hg add file.txt
$ hg commit -m 'first commit' # 0

$ echo "second" > file.txt
$ hg commit -m 'second commit' # 1

$ echo "third" > file.txt
$ hg commit -m 'third commit' # 2

$ hg diff --nodates --change 1
diff -r 4bb65dda5db4 -r e9b286083166 file.txt
--- a/file.txt
+++ b/file.txt
@@ -1,1 +1,1 @@
-first
+second

$ hg diff --change e9b286083166
diff -r 4bb65dda5db4 -r e9b286083166 file.txt
--- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +1,1 @@
-first
+second

Test dumb revspecs (issue3474)

$ hg diff -r 2:2
$ hg diff -r "2 and 1"
abort: empty revision range
[255]

Testing diff --change when merge:

$ for i in 1 2 3 4 5 6 7 8 9 10; do
> echo $i >> file.txt
> done
$ hg commit -m "lots of text" # 3

$ sed -e 's,^2$,x,' file.txt > file.txt.tmp
$ mv file.txt.tmp file.txt
$ hg commit -m "change 2 to x" # 4

$ hg up -r 3
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ sed -e 's,^8$,y,' file.txt > file.txt.tmp
$ mv file.txt.tmp file.txt
$ hg commit -m "change 8 to y"
created new head

$ hg up -C -r 4
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg merge -r 5
merging file.txt
0 files updated, 1 files merged, 0 files removed, 0 files unresolved
(branch merge, don't forget to commit)
$ hg commit -m "merge 8 to y" # 6

$ hg diff --change 5
diff -r ae119d680c82 -r 9085c5c02e52 file.txt
--- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
@@ -6,6 +6,6 @@
5
6
7
-8
+y
9
10

must be similar to 'hg diff --change 5':

$ hg diff -c 6
diff -r 273b50f17c6d -r 979ca961fd2e file.txt
--- a/file.txt Thu Jan 01 00:00:00 1970 +0000
+++ b/file.txt Thu Jan 01 00:00:00 1970 +0000
@@ -6,6 +6,6 @@
5
6
7
-8
+y
9
10