match: make explicitdir and traversedir None by default...
author:    Siddharth Agarwal
changeset: r19143:3cb94685 (default branch)
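The change itself is narrow: before this commit the code called match.explicitdir and match.traversedir unconditionally (the removed lines below), and afterwards those attributes default to None, so every call site first checks that a callback was actually supplied. A minimal sketch of that caller-side pattern, with a hypothetical helper and directory list that are not part of the changeset:

    # Hypothetical helper illustrating the guarded-callback pattern;
    # 'm' is assumed to be a mercurial.match.match instance.
    def report_dirs(m, dirs):
        # explicitdir (and likewise traversedir) may now be None,
        # so test the attribute before calling it
        if m.explicitdir:
            for d in dirs:
                m.explicitdir(d)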
@@ -1,172 +1,173 @@
1 # client.py - inotify status client
1 # client.py - inotify status client
2 #
2 #
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 # Copyright 2009 Nicolas Dumazet <nicdumz@gmail.com>
5 # Copyright 2009 Nicolas Dumazet <nicdumz@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 import common, server
11 import common, server
12 import errno, os, socket, struct
12 import errno, os, socket, struct
13
13
14 class QueryFailed(Exception):
14 class QueryFailed(Exception):
15 pass
15 pass
16
16
17 def start_server(function):
17 def start_server(function):
18 """
18 """
19 Decorator.
19 Decorator.
20 Tries to call function; if that fails, tries to (re)start the inotify server.
20 Tries to call function; if that fails, tries to (re)start the inotify server.
21 Raises QueryFailed if something goes wrong.
21 Raises QueryFailed if something goes wrong.
22 """
22 """
23 def decorated_function(self, *args):
23 def decorated_function(self, *args):
24 try:
24 try:
25 return function(self, *args)
25 return function(self, *args)
26 except (OSError, socket.error), err:
26 except (OSError, socket.error), err:
27 autostart = self.ui.configbool('inotify', 'autostart', True)
27 autostart = self.ui.configbool('inotify', 'autostart', True)
28
28
29 if err.args[0] == errno.ECONNREFUSED:
29 if err.args[0] == errno.ECONNREFUSED:
30 self.ui.warn(_('inotify-client: found dead inotify server '
30 self.ui.warn(_('inotify-client: found dead inotify server '
31 'socket; removing it\n'))
31 'socket; removing it\n'))
32 os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
32 os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
33 if err.args[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
33 if err.args[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
34 try:
34 try:
35 try:
35 try:
36 server.start(self.ui, self.dirstate, self.root,
36 server.start(self.ui, self.dirstate, self.root,
37 dict(daemon=True, daemon_pipefds=''))
37 dict(daemon=True, daemon_pipefds=''))
38 except server.AlreadyStartedException, inst:
38 except server.AlreadyStartedException, inst:
39 # another process may have started its own
39 # another process may have started its own
40 # inotify server while this one was starting.
40 # inotify server while this one was starting.
41 self.ui.debug(str(inst))
41 self.ui.debug(str(inst))
42 except Exception, inst:
42 except Exception, inst:
43 self.ui.warn(_('inotify-client: could not start inotify '
43 self.ui.warn(_('inotify-client: could not start inotify '
44 'server: %s\n') % inst)
44 'server: %s\n') % inst)
45 else:
45 else:
46 try:
46 try:
47 return function(self, *args)
47 return function(self, *args)
48 except socket.error, err:
48 except socket.error, err:
49 self.ui.warn(_('inotify-client: could not talk to new '
49 self.ui.warn(_('inotify-client: could not talk to new '
50 'inotify server: %s\n') % err.args[-1])
50 'inotify server: %s\n') % err.args[-1])
51 elif err.args[0] in (errno.ECONNREFUSED, errno.ENOENT):
51 elif err.args[0] in (errno.ECONNREFUSED, errno.ENOENT):
52 # silently ignore normal errors if autostart is False
52 # silently ignore normal errors if autostart is False
53 self.ui.debug('(inotify server not running)\n')
53 self.ui.debug('(inotify server not running)\n')
54 else:
54 else:
55 self.ui.warn(_('inotify-client: failed to contact inotify '
55 self.ui.warn(_('inotify-client: failed to contact inotify '
56 'server: %s\n') % err.args[-1])
56 'server: %s\n') % err.args[-1])
57
57
58 self.ui.traceback()
58 self.ui.traceback()
59 raise QueryFailed('inotify query failed')
59 raise QueryFailed('inotify query failed')
60
60
61 return decorated_function
61 return decorated_function
62
62
63
63
64 class client(object):
64 class client(object):
65 def __init__(self, ui, repo):
65 def __init__(self, ui, repo):
66 self.ui = ui
66 self.ui = ui
67 self.dirstate = repo.dirstate
67 self.dirstate = repo.dirstate
68 self.root = repo.root
68 self.root = repo.root
69 self.sock = socket.socket(socket.AF_UNIX)
69 self.sock = socket.socket(socket.AF_UNIX)
70
70
71 def _connect(self):
71 def _connect(self):
72 sockpath = os.path.join(self.root, '.hg', 'inotify.sock')
72 sockpath = os.path.join(self.root, '.hg', 'inotify.sock')
73 try:
73 try:
74 self.sock.connect(sockpath)
74 self.sock.connect(sockpath)
75 except socket.error, err:
75 except socket.error, err:
76 if err.args[0] == "AF_UNIX path too long":
76 if err.args[0] == "AF_UNIX path too long":
77 sockpath = os.readlink(sockpath)
77 sockpath = os.readlink(sockpath)
78 self.sock.connect(sockpath)
78 self.sock.connect(sockpath)
79 else:
79 else:
80 raise
80 raise
81
81
82 def _send(self, type, data):
82 def _send(self, type, data):
83 """Sends protocol version number, and the data"""
83 """Sends protocol version number, and the data"""
84 self.sock.sendall(chr(common.version) + type + data)
84 self.sock.sendall(chr(common.version) + type + data)
85
85
86 self.sock.shutdown(socket.SHUT_WR)
86 self.sock.shutdown(socket.SHUT_WR)
87
87
88 def _receive(self, type):
88 def _receive(self, type):
89 """
89 """
90 Reads data, checks the version number, extracts headers,
90 Reads data, checks the version number, extracts headers,
91 and returns a tuple (data descriptor, header).
91 and returns a tuple (data descriptor, header).
92 Raises QueryFailed on error.
92 Raises QueryFailed on error.
93 """
93 """
94 cs = common.recvcs(self.sock)
94 cs = common.recvcs(self.sock)
95 try:
95 try:
96 version = ord(cs.read(1))
96 version = ord(cs.read(1))
97 except TypeError:
97 except TypeError:
98 # empty answer, assume the server crashed
98 # empty answer, assume the server crashed
99 self.ui.warn(_('inotify-client: received empty answer from inotify '
99 self.ui.warn(_('inotify-client: received empty answer from inotify '
100 'server'))
100 'server'))
101 raise QueryFailed('server crashed')
101 raise QueryFailed('server crashed')
102
102
103 if version != common.version:
103 if version != common.version:
104 self.ui.warn(_('(inotify: received response from incompatible '
104 self.ui.warn(_('(inotify: received response from incompatible '
105 'server version %d)\n') % version)
105 'server version %d)\n') % version)
106 raise QueryFailed('incompatible server version')
106 raise QueryFailed('incompatible server version')
107
107
108 readtype = cs.read(4)
108 readtype = cs.read(4)
109 if readtype != type:
109 if readtype != type:
110 self.ui.warn(_('(inotify: received \'%s\' response when expecting'
110 self.ui.warn(_('(inotify: received \'%s\' response when expecting'
111 ' \'%s\')\n') % (readtype, type))
111 ' \'%s\')\n') % (readtype, type))
112 raise QueryFailed('wrong response type')
112 raise QueryFailed('wrong response type')
113
113
114 hdrfmt = common.resphdrfmts[type]
114 hdrfmt = common.resphdrfmts[type]
115 hdrsize = common.resphdrsizes[type]
115 hdrsize = common.resphdrsizes[type]
116 try:
116 try:
117 resphdr = struct.unpack(hdrfmt, cs.read(hdrsize))
117 resphdr = struct.unpack(hdrfmt, cs.read(hdrsize))
118 except struct.error:
118 except struct.error:
119 raise QueryFailed('unable to retrieve query response headers')
119 raise QueryFailed('unable to retrieve query response headers')
120
120
121 return cs, resphdr
121 return cs, resphdr
122
122
123 def query(self, type, req):
123 def query(self, type, req):
124 self._connect()
124 self._connect()
125
125
126 self._send(type, req)
126 self._send(type, req)
127
127
128 return self._receive(type)
128 return self._receive(type)
129
129
130 @start_server
130 @start_server
131 def statusquery(self, names, match, ignored, clean, unknown=True):
131 def statusquery(self, names, match, ignored, clean, unknown=True):
132
132
133 def genquery():
133 def genquery():
134 for n in names:
134 for n in names:
135 yield n
135 yield n
136 states = 'almrx!'
136 states = 'almrx!'
137 if ignored:
137 if ignored:
138 raise ValueError('this is insanity')
138 raise ValueError('this is insanity')
139 if clean:
139 if clean:
140 states += 'c'
140 states += 'c'
141 if unknown:
141 if unknown:
142 states += '?'
142 states += '?'
143 yield states
143 yield states
144
144
145 req = '\0'.join(genquery())
145 req = '\0'.join(genquery())
146
146
147 cs, resphdr = self.query('STAT', req)
147 cs, resphdr = self.query('STAT', req)
148
148
149 def readnames(nbytes):
149 def readnames(nbytes):
150 if nbytes:
150 if nbytes:
151 names = cs.read(nbytes)
151 names = cs.read(nbytes)
152 if names:
152 if names:
153 return filter(match, names.split('\0'))
153 return filter(match, names.split('\0'))
154 return []
154 return []
155 results = tuple(map(readnames, resphdr[:-1]))
155 results = tuple(map(readnames, resphdr[:-1]))
156
156
157 if names:
157 if names:
158 nbytes = resphdr[-1]
158 nbytes = resphdr[-1]
159 vdirs = cs.read(nbytes)
159 vdirs = cs.read(nbytes)
160 if vdirs:
160 if vdirs:
161 for vdir in vdirs.split('\0'):
161 for vdir in vdirs.split('\0'):
162 match.explicitdir(vdir)
162 if match.explicitdir:
163 match.explicitdir(vdir)
163
164
164 return results
165 return results
165
166
166 @start_server
167 @start_server
167 def debugquery(self):
168 def debugquery(self):
168 cs, resphdr = self.query('DBUG', '')
169 cs, resphdr = self.query('DBUG', '')
169
170
170 nbytes = resphdr[0]
171 nbytes = resphdr[0]
171 names = cs.read(nbytes)
172 names = cs.read(nbytes)
172 return names.split('\0')
173 return names.split('\0')
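For context on the client code above: statusquery() builds its request by NUL-joining the file names with a string of state letters, and _send() prefixes the protocol version byte plus the four-character request type. A rough reconstruction of that payload, with hypothetical file names and a placeholder value standing in for common.version:

    # Rough sketch of the STAT request assembled by genquery()/_send().
    version = 1                            # placeholder for common.version
    names = ['foo.c', 'sub/bar.py']        # hypothetical paths
    states = 'almrx!' + 'c' + '?'          # clean=True, unknown=True
    req = '\0'.join(names + [states])
    payload = chr(version) + 'STAT' + req  # what _send() writes to the socket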
@@ -1,817 +1,820 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 import errno
7 import errno
8
8
9 from node import nullid
9 from node import nullid
10 from i18n import _
10 from i18n import _
11 import scmutil, util, ignore, osutil, parsers, encoding
11 import scmutil, util, ignore, osutil, parsers, encoding
12 import os, stat, errno, gc
12 import os, stat, errno, gc
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15 filecache = scmutil.filecache
15 filecache = scmutil.filecache
16 _rangemask = 0x7fffffff
16 _rangemask = 0x7fffffff
17
17
18 class repocache(filecache):
18 class repocache(filecache):
19 """filecache for files in .hg/"""
19 """filecache for files in .hg/"""
20 def join(self, obj, fname):
20 def join(self, obj, fname):
21 return obj._opener.join(fname)
21 return obj._opener.join(fname)
22
22
23 class rootcache(filecache):
23 class rootcache(filecache):
24 """filecache for files in the repository root"""
24 """filecache for files in the repository root"""
25 def join(self, obj, fname):
25 def join(self, obj, fname):
26 return obj._join(fname)
26 return obj._join(fname)
27
27
28 class dirstate(object):
28 class dirstate(object):
29
29
30 def __init__(self, opener, ui, root, validate):
30 def __init__(self, opener, ui, root, validate):
31 '''Create a new dirstate object.
31 '''Create a new dirstate object.
32
32
33 opener is an open()-like callable that can be used to open the
33 opener is an open()-like callable that can be used to open the
34 dirstate file; root is the root of the directory tracked by
34 dirstate file; root is the root of the directory tracked by
35 the dirstate.
35 the dirstate.
36 '''
36 '''
37 self._opener = opener
37 self._opener = opener
38 self._validate = validate
38 self._validate = validate
39 self._root = root
39 self._root = root
40 self._rootdir = os.path.join(root, '')
40 self._rootdir = os.path.join(root, '')
41 self._dirty = False
41 self._dirty = False
42 self._dirtypl = False
42 self._dirtypl = False
43 self._lastnormaltime = 0
43 self._lastnormaltime = 0
44 self._ui = ui
44 self._ui = ui
45 self._filecache = {}
45 self._filecache = {}
46
46
47 @propertycache
47 @propertycache
48 def _map(self):
48 def _map(self):
49 '''Return the dirstate contents as a map from filename to
49 '''Return the dirstate contents as a map from filename to
50 (state, mode, size, time).'''
50 (state, mode, size, time).'''
51 self._read()
51 self._read()
52 return self._map
52 return self._map
53
53
54 @propertycache
54 @propertycache
55 def _copymap(self):
55 def _copymap(self):
56 self._read()
56 self._read()
57 return self._copymap
57 return self._copymap
58
58
59 @propertycache
59 @propertycache
60 def _foldmap(self):
60 def _foldmap(self):
61 f = {}
61 f = {}
62 for name, s in self._map.iteritems():
62 for name, s in self._map.iteritems():
63 if s[0] != 'r':
63 if s[0] != 'r':
64 f[util.normcase(name)] = name
64 f[util.normcase(name)] = name
65 for name in self._dirs:
65 for name in self._dirs:
66 f[util.normcase(name)] = name
66 f[util.normcase(name)] = name
67 f['.'] = '.' # prevents useless util.fspath() invocation
67 f['.'] = '.' # prevents useless util.fspath() invocation
68 return f
68 return f
69
69
70 @repocache('branch')
70 @repocache('branch')
71 def _branch(self):
71 def _branch(self):
72 try:
72 try:
73 return self._opener.read("branch").strip() or "default"
73 return self._opener.read("branch").strip() or "default"
74 except IOError, inst:
74 except IOError, inst:
75 if inst.errno != errno.ENOENT:
75 if inst.errno != errno.ENOENT:
76 raise
76 raise
77 return "default"
77 return "default"
78
78
79 @propertycache
79 @propertycache
80 def _pl(self):
80 def _pl(self):
81 try:
81 try:
82 fp = self._opener("dirstate")
82 fp = self._opener("dirstate")
83 st = fp.read(40)
83 st = fp.read(40)
84 fp.close()
84 fp.close()
85 l = len(st)
85 l = len(st)
86 if l == 40:
86 if l == 40:
87 return st[:20], st[20:40]
87 return st[:20], st[20:40]
88 elif l > 0 and l < 40:
88 elif l > 0 and l < 40:
89 raise util.Abort(_('working directory state appears damaged!'))
89 raise util.Abort(_('working directory state appears damaged!'))
90 except IOError, err:
90 except IOError, err:
91 if err.errno != errno.ENOENT:
91 if err.errno != errno.ENOENT:
92 raise
92 raise
93 return [nullid, nullid]
93 return [nullid, nullid]
94
94
95 @propertycache
95 @propertycache
96 def _dirs(self):
96 def _dirs(self):
97 return scmutil.dirs(self._map, 'r')
97 return scmutil.dirs(self._map, 'r')
98
98
99 def dirs(self):
99 def dirs(self):
100 return self._dirs
100 return self._dirs
101
101
102 @rootcache('.hgignore')
102 @rootcache('.hgignore')
103 def _ignore(self):
103 def _ignore(self):
104 files = [self._join('.hgignore')]
104 files = [self._join('.hgignore')]
105 for name, path in self._ui.configitems("ui"):
105 for name, path in self._ui.configitems("ui"):
106 if name == 'ignore' or name.startswith('ignore.'):
106 if name == 'ignore' or name.startswith('ignore.'):
107 files.append(util.expandpath(path))
107 files.append(util.expandpath(path))
108 return ignore.ignore(self._root, files, self._ui.warn)
108 return ignore.ignore(self._root, files, self._ui.warn)
109
109
110 @propertycache
110 @propertycache
111 def _slash(self):
111 def _slash(self):
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112 return self._ui.configbool('ui', 'slash') and os.sep != '/'
113
113
114 @propertycache
114 @propertycache
115 def _checklink(self):
115 def _checklink(self):
116 return util.checklink(self._root)
116 return util.checklink(self._root)
117
117
118 @propertycache
118 @propertycache
119 def _checkexec(self):
119 def _checkexec(self):
120 return util.checkexec(self._root)
120 return util.checkexec(self._root)
121
121
122 @propertycache
122 @propertycache
123 def _checkcase(self):
123 def _checkcase(self):
124 return not util.checkcase(self._join('.hg'))
124 return not util.checkcase(self._join('.hg'))
125
125
126 def _join(self, f):
126 def _join(self, f):
127 # much faster than os.path.join()
127 # much faster than os.path.join()
128 # it's safe because f is always a relative path
128 # it's safe because f is always a relative path
129 return self._rootdir + f
129 return self._rootdir + f
130
130
131 def flagfunc(self, buildfallback):
131 def flagfunc(self, buildfallback):
132 if self._checklink and self._checkexec:
132 if self._checklink and self._checkexec:
133 def f(x):
133 def f(x):
134 try:
134 try:
135 st = os.lstat(self._join(x))
135 st = os.lstat(self._join(x))
136 if util.statislink(st):
136 if util.statislink(st):
137 return 'l'
137 return 'l'
138 if util.statisexec(st):
138 if util.statisexec(st):
139 return 'x'
139 return 'x'
140 except OSError:
140 except OSError:
141 pass
141 pass
142 return ''
142 return ''
143 return f
143 return f
144
144
145 fallback = buildfallback()
145 fallback = buildfallback()
146 if self._checklink:
146 if self._checklink:
147 def f(x):
147 def f(x):
148 if os.path.islink(self._join(x)):
148 if os.path.islink(self._join(x)):
149 return 'l'
149 return 'l'
150 if 'x' in fallback(x):
150 if 'x' in fallback(x):
151 return 'x'
151 return 'x'
152 return ''
152 return ''
153 return f
153 return f
154 if self._checkexec:
154 if self._checkexec:
155 def f(x):
155 def f(x):
156 if 'l' in fallback(x):
156 if 'l' in fallback(x):
157 return 'l'
157 return 'l'
158 if util.isexec(self._join(x)):
158 if util.isexec(self._join(x)):
159 return 'x'
159 return 'x'
160 return ''
160 return ''
161 return f
161 return f
162 else:
162 else:
163 return fallback
163 return fallback
164
164
165 def getcwd(self):
165 def getcwd(self):
166 cwd = os.getcwd()
166 cwd = os.getcwd()
167 if cwd == self._root:
167 if cwd == self._root:
168 return ''
168 return ''
169 # self._root ends with a path separator if self._root is '/' or 'C:\'
169 # self._root ends with a path separator if self._root is '/' or 'C:\'
170 rootsep = self._root
170 rootsep = self._root
171 if not util.endswithsep(rootsep):
171 if not util.endswithsep(rootsep):
172 rootsep += os.sep
172 rootsep += os.sep
173 if cwd.startswith(rootsep):
173 if cwd.startswith(rootsep):
174 return cwd[len(rootsep):]
174 return cwd[len(rootsep):]
175 else:
175 else:
176 # we're outside the repo. return an absolute path.
176 # we're outside the repo. return an absolute path.
177 return cwd
177 return cwd
178
178
179 def pathto(self, f, cwd=None):
179 def pathto(self, f, cwd=None):
180 if cwd is None:
180 if cwd is None:
181 cwd = self.getcwd()
181 cwd = self.getcwd()
182 path = util.pathto(self._root, cwd, f)
182 path = util.pathto(self._root, cwd, f)
183 if self._slash:
183 if self._slash:
184 return util.normpath(path)
184 return util.normpath(path)
185 return path
185 return path
186
186
187 def __getitem__(self, key):
187 def __getitem__(self, key):
188 '''Return the current state of key (a filename) in the dirstate.
188 '''Return the current state of key (a filename) in the dirstate.
189
189
190 States are:
190 States are:
191 n normal
191 n normal
192 m needs merging
192 m needs merging
193 r marked for removal
193 r marked for removal
194 a marked for addition
194 a marked for addition
195 ? not tracked
195 ? not tracked
196 '''
196 '''
197 return self._map.get(key, ("?",))[0]
197 return self._map.get(key, ("?",))[0]
198
198
199 def __contains__(self, key):
199 def __contains__(self, key):
200 return key in self._map
200 return key in self._map
201
201
202 def __iter__(self):
202 def __iter__(self):
203 for x in sorted(self._map):
203 for x in sorted(self._map):
204 yield x
204 yield x
205
205
206 def iteritems(self):
206 def iteritems(self):
207 return self._map.iteritems()
207 return self._map.iteritems()
208
208
209 def parents(self):
209 def parents(self):
210 return [self._validate(p) for p in self._pl]
210 return [self._validate(p) for p in self._pl]
211
211
212 def p1(self):
212 def p1(self):
213 return self._validate(self._pl[0])
213 return self._validate(self._pl[0])
214
214
215 def p2(self):
215 def p2(self):
216 return self._validate(self._pl[1])
216 return self._validate(self._pl[1])
217
217
218 def branch(self):
218 def branch(self):
219 return encoding.tolocal(self._branch)
219 return encoding.tolocal(self._branch)
220
220
221 def setparents(self, p1, p2=nullid):
221 def setparents(self, p1, p2=nullid):
222 """Set dirstate parents to p1 and p2.
222 """Set dirstate parents to p1 and p2.
223
223
224 When moving from two parents to one, 'm' merged entries are
224 When moving from two parents to one, 'm' merged entries are
225 adjusted to normal and previous copy records are discarded and
225 adjusted to normal and previous copy records are discarded and
226 returned by the call.
226 returned by the call.
227
227
228 See localrepo.setparents()
228 See localrepo.setparents()
229 """
229 """
230 self._dirty = self._dirtypl = True
230 self._dirty = self._dirtypl = True
231 oldp2 = self._pl[1]
231 oldp2 = self._pl[1]
232 self._pl = p1, p2
232 self._pl = p1, p2
233 copies = {}
233 copies = {}
234 if oldp2 != nullid and p2 == nullid:
234 if oldp2 != nullid and p2 == nullid:
235 # Discard 'm' markers when moving away from a merge state
235 # Discard 'm' markers when moving away from a merge state
236 for f, s in self._map.iteritems():
236 for f, s in self._map.iteritems():
237 if s[0] == 'm':
237 if s[0] == 'm':
238 if f in self._copymap:
238 if f in self._copymap:
239 copies[f] = self._copymap[f]
239 copies[f] = self._copymap[f]
240 self.normallookup(f)
240 self.normallookup(f)
241 return copies
241 return copies
242
242
243 def setbranch(self, branch):
243 def setbranch(self, branch):
244 self._branch = encoding.fromlocal(branch)
244 self._branch = encoding.fromlocal(branch)
245 f = self._opener('branch', 'w', atomictemp=True)
245 f = self._opener('branch', 'w', atomictemp=True)
246 try:
246 try:
247 f.write(self._branch + '\n')
247 f.write(self._branch + '\n')
248 f.close()
248 f.close()
249
249
250 # make sure filecache has the correct stat info for _branch after
250 # make sure filecache has the correct stat info for _branch after
251 # replacing the underlying file
251 # replacing the underlying file
252 ce = self._filecache['_branch']
252 ce = self._filecache['_branch']
253 if ce:
253 if ce:
254 ce.refresh()
254 ce.refresh()
255 except: # re-raises
255 except: # re-raises
256 f.discard()
256 f.discard()
257 raise
257 raise
258
258
259 def _read(self):
259 def _read(self):
260 self._map = {}
260 self._map = {}
261 self._copymap = {}
261 self._copymap = {}
262 try:
262 try:
263 st = self._opener.read("dirstate")
263 st = self._opener.read("dirstate")
264 except IOError, err:
264 except IOError, err:
265 if err.errno != errno.ENOENT:
265 if err.errno != errno.ENOENT:
266 raise
266 raise
267 return
267 return
268 if not st:
268 if not st:
269 return
269 return
270
270
271 # Python's garbage collector triggers a GC each time a certain number
271 # Python's garbage collector triggers a GC each time a certain number
272 # of container objects (the number being defined by
272 # of container objects (the number being defined by
273 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
273 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
274 # for each file in the dirstate. The C version then immediately marks
274 # for each file in the dirstate. The C version then immediately marks
275 # them as not to be tracked by the collector. However, this has no
275 # them as not to be tracked by the collector. However, this has no
276 # effect on when GCs are triggered, only on what objects the GC looks
276 # effect on when GCs are triggered, only on what objects the GC looks
277 # into. This means that O(number of files) GCs are unavoidable.
277 # into. This means that O(number of files) GCs are unavoidable.
278 # Depending on when in the process's lifetime the dirstate is parsed,
278 # Depending on when in the process's lifetime the dirstate is parsed,
279 # this can get very expensive. As a workaround, disable GC while
279 # this can get very expensive. As a workaround, disable GC while
280 # parsing the dirstate.
280 # parsing the dirstate.
281 gcenabled = gc.isenabled()
281 gcenabled = gc.isenabled()
282 gc.disable()
282 gc.disable()
283 try:
283 try:
284 p = parsers.parse_dirstate(self._map, self._copymap, st)
284 p = parsers.parse_dirstate(self._map, self._copymap, st)
285 finally:
285 finally:
286 if gcenabled:
286 if gcenabled:
287 gc.enable()
287 gc.enable()
288 if not self._dirtypl:
288 if not self._dirtypl:
289 self._pl = p
289 self._pl = p
290
290
291 def invalidate(self):
291 def invalidate(self):
292 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
292 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
293 "_ignore"):
293 "_ignore"):
294 if a in self.__dict__:
294 if a in self.__dict__:
295 delattr(self, a)
295 delattr(self, a)
296 self._lastnormaltime = 0
296 self._lastnormaltime = 0
297 self._dirty = False
297 self._dirty = False
298
298
299 def copy(self, source, dest):
299 def copy(self, source, dest):
300 """Mark dest as a copy of source. Unmark dest if source is None."""
300 """Mark dest as a copy of source. Unmark dest if source is None."""
301 if source == dest:
301 if source == dest:
302 return
302 return
303 self._dirty = True
303 self._dirty = True
304 if source is not None:
304 if source is not None:
305 self._copymap[dest] = source
305 self._copymap[dest] = source
306 elif dest in self._copymap:
306 elif dest in self._copymap:
307 del self._copymap[dest]
307 del self._copymap[dest]
308
308
309 def copied(self, file):
309 def copied(self, file):
310 return self._copymap.get(file, None)
310 return self._copymap.get(file, None)
311
311
312 def copies(self):
312 def copies(self):
313 return self._copymap
313 return self._copymap
314
314
315 def _droppath(self, f):
315 def _droppath(self, f):
316 if self[f] not in "?r" and "_dirs" in self.__dict__:
316 if self[f] not in "?r" and "_dirs" in self.__dict__:
317 self._dirs.delpath(f)
317 self._dirs.delpath(f)
318
318
319 def _addpath(self, f, state, mode, size, mtime):
319 def _addpath(self, f, state, mode, size, mtime):
320 oldstate = self[f]
320 oldstate = self[f]
321 if state == 'a' or oldstate == 'r':
321 if state == 'a' or oldstate == 'r':
322 scmutil.checkfilename(f)
322 scmutil.checkfilename(f)
323 if f in self._dirs:
323 if f in self._dirs:
324 raise util.Abort(_('directory %r already in dirstate') % f)
324 raise util.Abort(_('directory %r already in dirstate') % f)
325 # shadows
325 # shadows
326 for d in scmutil.finddirs(f):
326 for d in scmutil.finddirs(f):
327 if d in self._dirs:
327 if d in self._dirs:
328 break
328 break
329 if d in self._map and self[d] != 'r':
329 if d in self._map and self[d] != 'r':
330 raise util.Abort(
330 raise util.Abort(
331 _('file %r in dirstate clashes with %r') % (d, f))
331 _('file %r in dirstate clashes with %r') % (d, f))
332 if oldstate in "?r" and "_dirs" in self.__dict__:
332 if oldstate in "?r" and "_dirs" in self.__dict__:
333 self._dirs.addpath(f)
333 self._dirs.addpath(f)
334 self._dirty = True
334 self._dirty = True
335 self._map[f] = (state, mode, size, mtime)
335 self._map[f] = (state, mode, size, mtime)
336
336
337 def normal(self, f):
337 def normal(self, f):
338 '''Mark a file normal and clean.'''
338 '''Mark a file normal and clean.'''
339 s = os.lstat(self._join(f))
339 s = os.lstat(self._join(f))
340 mtime = int(s.st_mtime)
340 mtime = int(s.st_mtime)
341 self._addpath(f, 'n', s.st_mode,
341 self._addpath(f, 'n', s.st_mode,
342 s.st_size & _rangemask, mtime & _rangemask)
342 s.st_size & _rangemask, mtime & _rangemask)
343 if f in self._copymap:
343 if f in self._copymap:
344 del self._copymap[f]
344 del self._copymap[f]
345 if mtime > self._lastnormaltime:
345 if mtime > self._lastnormaltime:
346 # Remember the most recent modification timeslot for status(),
346 # Remember the most recent modification timeslot for status(),
347 # to make sure we won't miss future size-preserving file content
347 # to make sure we won't miss future size-preserving file content
348 # modifications that happen within the same timeslot.
348 # modifications that happen within the same timeslot.
349 self._lastnormaltime = mtime
349 self._lastnormaltime = mtime
350
350
351 def normallookup(self, f):
351 def normallookup(self, f):
352 '''Mark a file normal, but possibly dirty.'''
352 '''Mark a file normal, but possibly dirty.'''
353 if self._pl[1] != nullid and f in self._map:
353 if self._pl[1] != nullid and f in self._map:
354 # if there is a merge going on and the file was either
354 # if there is a merge going on and the file was either
355 # in state 'm' (-1) or coming from other parent (-2) before
355 # in state 'm' (-1) or coming from other parent (-2) before
356 # being removed, restore that state.
356 # being removed, restore that state.
357 entry = self._map[f]
357 entry = self._map[f]
358 if entry[0] == 'r' and entry[2] in (-1, -2):
358 if entry[0] == 'r' and entry[2] in (-1, -2):
359 source = self._copymap.get(f)
359 source = self._copymap.get(f)
360 if entry[2] == -1:
360 if entry[2] == -1:
361 self.merge(f)
361 self.merge(f)
362 elif entry[2] == -2:
362 elif entry[2] == -2:
363 self.otherparent(f)
363 self.otherparent(f)
364 if source:
364 if source:
365 self.copy(source, f)
365 self.copy(source, f)
366 return
366 return
367 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
367 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
368 return
368 return
369 self._addpath(f, 'n', 0, -1, -1)
369 self._addpath(f, 'n', 0, -1, -1)
370 if f in self._copymap:
370 if f in self._copymap:
371 del self._copymap[f]
371 del self._copymap[f]
372
372
373 def otherparent(self, f):
373 def otherparent(self, f):
374 '''Mark as coming from the other parent, always dirty.'''
374 '''Mark as coming from the other parent, always dirty.'''
375 if self._pl[1] == nullid:
375 if self._pl[1] == nullid:
376 raise util.Abort(_("setting %r to other parent "
376 raise util.Abort(_("setting %r to other parent "
377 "only allowed in merges") % f)
377 "only allowed in merges") % f)
378 self._addpath(f, 'n', 0, -2, -1)
378 self._addpath(f, 'n', 0, -2, -1)
379 if f in self._copymap:
379 if f in self._copymap:
380 del self._copymap[f]
380 del self._copymap[f]
381
381
382 def add(self, f):
382 def add(self, f):
383 '''Mark a file added.'''
383 '''Mark a file added.'''
384 self._addpath(f, 'a', 0, -1, -1)
384 self._addpath(f, 'a', 0, -1, -1)
385 if f in self._copymap:
385 if f in self._copymap:
386 del self._copymap[f]
386 del self._copymap[f]
387
387
388 def remove(self, f):
388 def remove(self, f):
389 '''Mark a file removed.'''
389 '''Mark a file removed.'''
390 self._dirty = True
390 self._dirty = True
391 self._droppath(f)
391 self._droppath(f)
392 size = 0
392 size = 0
393 if self._pl[1] != nullid and f in self._map:
393 if self._pl[1] != nullid and f in self._map:
394 # backup the previous state
394 # backup the previous state
395 entry = self._map[f]
395 entry = self._map[f]
396 if entry[0] == 'm': # merge
396 if entry[0] == 'm': # merge
397 size = -1
397 size = -1
398 elif entry[0] == 'n' and entry[2] == -2: # other parent
398 elif entry[0] == 'n' and entry[2] == -2: # other parent
399 size = -2
399 size = -2
400 self._map[f] = ('r', 0, size, 0)
400 self._map[f] = ('r', 0, size, 0)
401 if size == 0 and f in self._copymap:
401 if size == 0 and f in self._copymap:
402 del self._copymap[f]
402 del self._copymap[f]
403
403
404 def merge(self, f):
404 def merge(self, f):
405 '''Mark a file merged.'''
405 '''Mark a file merged.'''
406 if self._pl[1] == nullid:
406 if self._pl[1] == nullid:
407 return self.normallookup(f)
407 return self.normallookup(f)
408 s = os.lstat(self._join(f))
408 s = os.lstat(self._join(f))
409 self._addpath(f, 'm', s.st_mode,
409 self._addpath(f, 'm', s.st_mode,
410 s.st_size & _rangemask, int(s.st_mtime) & _rangemask)
410 s.st_size & _rangemask, int(s.st_mtime) & _rangemask)
411 if f in self._copymap:
411 if f in self._copymap:
412 del self._copymap[f]
412 del self._copymap[f]
413
413
414 def drop(self, f):
414 def drop(self, f):
415 '''Drop a file from the dirstate'''
415 '''Drop a file from the dirstate'''
416 if f in self._map:
416 if f in self._map:
417 self._dirty = True
417 self._dirty = True
418 self._droppath(f)
418 self._droppath(f)
419 del self._map[f]
419 del self._map[f]
420
420
421 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
421 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
422 normed = util.normcase(path)
422 normed = util.normcase(path)
423 folded = self._foldmap.get(normed, None)
423 folded = self._foldmap.get(normed, None)
424 if folded is None:
424 if folded is None:
425 if isknown:
425 if isknown:
426 folded = path
426 folded = path
427 else:
427 else:
428 if exists is None:
428 if exists is None:
429 exists = os.path.lexists(os.path.join(self._root, path))
429 exists = os.path.lexists(os.path.join(self._root, path))
430 if not exists:
430 if not exists:
431 # Maybe a path component exists
431 # Maybe a path component exists
432 if not ignoremissing and '/' in path:
432 if not ignoremissing and '/' in path:
433 d, f = path.rsplit('/', 1)
433 d, f = path.rsplit('/', 1)
434 d = self._normalize(d, isknown, ignoremissing, None)
434 d = self._normalize(d, isknown, ignoremissing, None)
435 folded = d + "/" + f
435 folded = d + "/" + f
436 else:
436 else:
437 # No path components, preserve original case
437 # No path components, preserve original case
438 folded = path
438 folded = path
439 else:
439 else:
440 # recursively normalize leading directory components
440 # recursively normalize leading directory components
441 # against dirstate
441 # against dirstate
442 if '/' in normed:
442 if '/' in normed:
443 d, f = normed.rsplit('/', 1)
443 d, f = normed.rsplit('/', 1)
444 d = self._normalize(d, isknown, ignoremissing, True)
444 d = self._normalize(d, isknown, ignoremissing, True)
445 r = self._root + "/" + d
445 r = self._root + "/" + d
446 folded = d + "/" + util.fspath(f, r)
446 folded = d + "/" + util.fspath(f, r)
447 else:
447 else:
448 folded = util.fspath(normed, self._root)
448 folded = util.fspath(normed, self._root)
449 self._foldmap[normed] = folded
449 self._foldmap[normed] = folded
450
450
451 return folded
451 return folded
452
452
453 def normalize(self, path, isknown=False, ignoremissing=False):
453 def normalize(self, path, isknown=False, ignoremissing=False):
454 '''
454 '''
455 normalize the case of a pathname when on a casefolding filesystem
455 normalize the case of a pathname when on a casefolding filesystem
456
456
457 isknown specifies whether the filename came from walking the
457 isknown specifies whether the filename came from walking the
458 disk, to avoid extra filesystem access.
458 disk, to avoid extra filesystem access.
459
459
460 If ignoremissing is True, missing paths are returned
460 If ignoremissing is True, missing paths are returned
461 unchanged. Otherwise, we try harder to normalize possibly
461 unchanged. Otherwise, we try harder to normalize possibly
462 existing path components.
462 existing path components.
463
463
464 The normalized case is determined based on the following precedence:
464 The normalized case is determined based on the following precedence:
465
465
466 - version of name already stored in the dirstate
466 - version of name already stored in the dirstate
467 - version of name stored on disk
467 - version of name stored on disk
468 - version provided via command arguments
468 - version provided via command arguments
469 '''
469 '''
470
470
471 if self._checkcase:
471 if self._checkcase:
472 return self._normalize(path, isknown, ignoremissing)
472 return self._normalize(path, isknown, ignoremissing)
473 return path
473 return path
474
474
475 def clear(self):
475 def clear(self):
476 self._map = {}
476 self._map = {}
477 if "_dirs" in self.__dict__:
477 if "_dirs" in self.__dict__:
478 delattr(self, "_dirs")
478 delattr(self, "_dirs")
479 self._copymap = {}
479 self._copymap = {}
480 self._pl = [nullid, nullid]
480 self._pl = [nullid, nullid]
481 self._lastnormaltime = 0
481 self._lastnormaltime = 0
482 self._dirty = True
482 self._dirty = True
483
483
484 def rebuild(self, parent, allfiles, changedfiles=None):
484 def rebuild(self, parent, allfiles, changedfiles=None):
485 changedfiles = changedfiles or allfiles
485 changedfiles = changedfiles or allfiles
486 oldmap = self._map
486 oldmap = self._map
487 self.clear()
487 self.clear()
488 for f in allfiles:
488 for f in allfiles:
489 if f not in changedfiles:
489 if f not in changedfiles:
490 self._map[f] = oldmap[f]
490 self._map[f] = oldmap[f]
491 else:
491 else:
492 if 'x' in allfiles.flags(f):
492 if 'x' in allfiles.flags(f):
493 self._map[f] = ('n', 0777, -1, 0)
493 self._map[f] = ('n', 0777, -1, 0)
494 else:
494 else:
495 self._map[f] = ('n', 0666, -1, 0)
495 self._map[f] = ('n', 0666, -1, 0)
496 self._pl = (parent, nullid)
496 self._pl = (parent, nullid)
497 self._dirty = True
497 self._dirty = True
498
498
499 def write(self):
499 def write(self):
500 if not self._dirty:
500 if not self._dirty:
501 return
501 return
502 st = self._opener("dirstate", "w", atomictemp=True)
502 st = self._opener("dirstate", "w", atomictemp=True)
503
503
504 def finish(s):
504 def finish(s):
505 st.write(s)
505 st.write(s)
506 st.close()
506 st.close()
507 self._lastnormaltime = 0
507 self._lastnormaltime = 0
508 self._dirty = self._dirtypl = False
508 self._dirty = self._dirtypl = False
509
509
510 # use the modification time of the newly created temporary file as the
510 # use the modification time of the newly created temporary file as the
511 # filesystem's notion of 'now'
511 # filesystem's notion of 'now'
512 now = util.fstat(st).st_mtime
512 now = util.fstat(st).st_mtime
513 finish(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
513 finish(parsers.pack_dirstate(self._map, self._copymap, self._pl, now))
514
514
515 def _dirignore(self, f):
515 def _dirignore(self, f):
516 if f == '.':
516 if f == '.':
517 return False
517 return False
518 if self._ignore(f):
518 if self._ignore(f):
519 return True
519 return True
520 for p in scmutil.finddirs(f):
520 for p in scmutil.finddirs(f):
521 if self._ignore(p):
521 if self._ignore(p):
522 return True
522 return True
523 return False
523 return False
524
524
525 def walk(self, match, subrepos, unknown, ignored):
525 def walk(self, match, subrepos, unknown, ignored):
526 '''
526 '''
527 Walk recursively through the directory tree, finding all files
527 Walk recursively through the directory tree, finding all files
528 matched by match.
528 matched by match.
529
529
530 Return a dict mapping filename to stat-like object (either
530 Return a dict mapping filename to stat-like object (either
531 mercurial.osutil.stat instance or return value of os.stat()).
531 mercurial.osutil.stat instance or return value of os.stat()).
532 '''
532 '''
533
533
534 def fwarn(f, msg):
534 def fwarn(f, msg):
535 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
535 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
536 return False
536 return False
537
537
538 def badtype(mode):
538 def badtype(mode):
539 kind = _('unknown')
539 kind = _('unknown')
540 if stat.S_ISCHR(mode):
540 if stat.S_ISCHR(mode):
541 kind = _('character device')
541 kind = _('character device')
542 elif stat.S_ISBLK(mode):
542 elif stat.S_ISBLK(mode):
543 kind = _('block device')
543 kind = _('block device')
544 elif stat.S_ISFIFO(mode):
544 elif stat.S_ISFIFO(mode):
545 kind = _('fifo')
545 kind = _('fifo')
546 elif stat.S_ISSOCK(mode):
546 elif stat.S_ISSOCK(mode):
547 kind = _('socket')
547 kind = _('socket')
548 elif stat.S_ISDIR(mode):
548 elif stat.S_ISDIR(mode):
549 kind = _('directory')
549 kind = _('directory')
550 return _('unsupported file type (type is %s)') % kind
550 return _('unsupported file type (type is %s)') % kind
551
551
552 ignore = self._ignore
552 ignore = self._ignore
553 dirignore = self._dirignore
553 dirignore = self._dirignore
554 if ignored:
554 if ignored:
555 ignore = util.never
555 ignore = util.never
556 dirignore = util.never
556 dirignore = util.never
557 elif not unknown:
557 elif not unknown:
558 # if unknown and ignored are False, skip step 2
558 # if unknown and ignored are False, skip step 2
559 ignore = util.always
559 ignore = util.always
560 dirignore = util.always
560 dirignore = util.always
561
561
562 matchfn = match.matchfn
562 matchfn = match.matchfn
563 matchalways = match.always()
563 matchalways = match.always()
564 matchedir = match.explicitdir
564 matchedir = match.explicitdir
565 matchtdir = match.traversedir
565 matchtdir = match.traversedir
566 badfn = match.bad
566 badfn = match.bad
567 dmap = self._map
567 dmap = self._map
568 normpath = util.normpath
568 normpath = util.normpath
569 listdir = osutil.listdir
569 listdir = osutil.listdir
570 lstat = os.lstat
570 lstat = os.lstat
571 getkind = stat.S_IFMT
571 getkind = stat.S_IFMT
572 dirkind = stat.S_IFDIR
572 dirkind = stat.S_IFDIR
573 regkind = stat.S_IFREG
573 regkind = stat.S_IFREG
574 lnkkind = stat.S_IFLNK
574 lnkkind = stat.S_IFLNK
575 join = self._join
575 join = self._join
576 work = []
576 work = []
577 wadd = work.append
577 wadd = work.append
578
578
579 exact = skipstep3 = False
579 exact = skipstep3 = False
580 if matchfn == match.exact: # match.exact
580 if matchfn == match.exact: # match.exact
581 exact = True
581 exact = True
582 dirignore = util.always # skip step 2
582 dirignore = util.always # skip step 2
583 elif match.files() and not match.anypats(): # match.match, no patterns
583 elif match.files() and not match.anypats(): # match.match, no patterns
584 skipstep3 = True
584 skipstep3 = True
585
585
586 if not exact and self._checkcase:
586 if not exact and self._checkcase:
587 normalize = self._normalize
587 normalize = self._normalize
588 skipstep3 = False
588 skipstep3 = False
589 else:
589 else:
590 normalize = None
590 normalize = None
591
591
592 files = sorted(match.files())
592 files = sorted(match.files())
593 subrepos.sort()
593 subrepos.sort()
594 i, j = 0, 0
594 i, j = 0, 0
595 while i < len(files) and j < len(subrepos):
595 while i < len(files) and j < len(subrepos):
596 subpath = subrepos[j] + "/"
596 subpath = subrepos[j] + "/"
597 if files[i] < subpath:
597 if files[i] < subpath:
598 i += 1
598 i += 1
599 continue
599 continue
600 while i < len(files) and files[i].startswith(subpath):
600 while i < len(files) and files[i].startswith(subpath):
601 del files[i]
601 del files[i]
602 j += 1
602 j += 1
603
603
604 if not files or '.' in files:
604 if not files or '.' in files:
605 files = ['']
605 files = ['']
606 results = dict.fromkeys(subrepos)
606 results = dict.fromkeys(subrepos)
607 results['.hg'] = None
607 results['.hg'] = None
608
608
609 # step 1: find all explicit files
609 # step 1: find all explicit files
610 for ff in files:
610 for ff in files:
611 if normalize:
611 if normalize:
612 nf = normalize(normpath(ff), False, True)
612 nf = normalize(normpath(ff), False, True)
613 else:
613 else:
614 nf = normpath(ff)
614 nf = normpath(ff)
615 if nf in results:
615 if nf in results:
616 continue
616 continue
617
617
618 try:
618 try:
619 st = lstat(join(nf))
619 st = lstat(join(nf))
620 kind = getkind(st.st_mode)
620 kind = getkind(st.st_mode)
621 if kind == dirkind:
621 if kind == dirkind:
622 skipstep3 = False
622 skipstep3 = False
623 if nf in dmap:
623 if nf in dmap:
624 #file deleted on disk but still in dirstate
624 #file deleted on disk but still in dirstate
625 results[nf] = None
625 results[nf] = None
626 matchedir(nf)
626 if matchedir:
627 matchedir(nf)
627 if not dirignore(nf):
628 if not dirignore(nf):
628 wadd(nf)
629 wadd(nf)
629 elif kind == regkind or kind == lnkkind:
630 elif kind == regkind or kind == lnkkind:
630 results[nf] = st
631 results[nf] = st
631 else:
632 else:
632 badfn(ff, badtype(kind))
633 badfn(ff, badtype(kind))
633 if nf in dmap:
634 if nf in dmap:
634 results[nf] = None
635 results[nf] = None
635 except OSError, inst:
636 except OSError, inst:
636 if nf in dmap: # does it exactly match a file?
637 if nf in dmap: # does it exactly match a file?
637 results[nf] = None
638 results[nf] = None
638 else: # does it match a directory?
639 else: # does it match a directory?
639 prefix = nf + "/"
640 prefix = nf + "/"
640 for fn in dmap:
641 for fn in dmap:
641 if fn.startswith(prefix):
642 if fn.startswith(prefix):
642 matchedir(nf)
643 if matchedir:
644 matchedir(nf)
643 skipstep3 = False
645 skipstep3 = False
644 break
646 break
645 else:
647 else:
646 badfn(ff, inst.strerror)
648 badfn(ff, inst.strerror)
647
649
648 # step 2: visit subdirectories
650 # step 2: visit subdirectories
649 while work:
651 while work:
650 nd = work.pop()
652 nd = work.pop()
651 skip = None
653 skip = None
652 if nd == '.':
654 if nd == '.':
653 nd = ''
655 nd = ''
654 else:
656 else:
655 skip = '.hg'
657 skip = '.hg'
656 try:
658 try:
657 entries = listdir(join(nd), stat=True, skip=skip)
659 entries = listdir(join(nd), stat=True, skip=skip)
658 except OSError, inst:
660 except OSError, inst:
659 if inst.errno in (errno.EACCES, errno.ENOENT):
661 if inst.errno in (errno.EACCES, errno.ENOENT):
660 fwarn(nd, inst.strerror)
662 fwarn(nd, inst.strerror)
661 continue
663 continue
662 raise
664 raise
663 for f, kind, st in entries:
665 for f, kind, st in entries:
664 if normalize:
666 if normalize:
665 nf = normalize(nd and (nd + "/" + f) or f, True, True)
667 nf = normalize(nd and (nd + "/" + f) or f, True, True)
666 else:
668 else:
667 nf = nd and (nd + "/" + f) or f
669 nf = nd and (nd + "/" + f) or f
668 if nf not in results:
670 if nf not in results:
669 if kind == dirkind:
671 if kind == dirkind:
670 if not ignore(nf):
672 if not ignore(nf):
671 matchtdir(nf)
673 if matchtdir:
674 matchtdir(nf)
672 wadd(nf)
675 wadd(nf)
673 if nf in dmap and (matchalways or matchfn(nf)):
676 if nf in dmap and (matchalways or matchfn(nf)):
674 results[nf] = None
677 results[nf] = None
675 elif kind == regkind or kind == lnkkind:
678 elif kind == regkind or kind == lnkkind:
676 if nf in dmap:
679 if nf in dmap:
677 if matchalways or matchfn(nf):
680 if matchalways or matchfn(nf):
678 results[nf] = st
681 results[nf] = st
679 elif (matchalways or matchfn(nf)) and not ignore(nf):
682 elif (matchalways or matchfn(nf)) and not ignore(nf):
680 results[nf] = st
683 results[nf] = st
681 elif nf in dmap and (matchalways or matchfn(nf)):
684 elif nf in dmap and (matchalways or matchfn(nf)):
682 results[nf] = None
685 results[nf] = None
683
686
684 for s in subrepos:
687 for s in subrepos:
685 del results[s]
688 del results[s]
686 del results['.hg']
689 del results['.hg']
687
690
688 # step 3: report unseen items in the dmap hash
691 # step 3: report unseen items in the dmap hash
689 if not skipstep3 and not exact:
692 if not skipstep3 and not exact:
690 if not results and matchalways:
693 if not results and matchalways:
691 visit = dmap.keys()
694 visit = dmap.keys()
692 else:
695 else:
693 visit = [f for f in dmap if f not in results and matchfn(f)]
696 visit = [f for f in dmap if f not in results and matchfn(f)]
694 visit.sort()
697 visit.sort()
695
698
696 if unknown:
699 if unknown:
697 # unknown == True means we walked the full directory tree above.
700 # unknown == True means we walked the full directory tree above.
698 # So if a file is not seen it was either a) not matching matchfn
701 # So if a file is not seen it was either a) not matching matchfn
699 # b) ignored, c) missing, or d) under a symlink directory.
702 # b) ignored, c) missing, or d) under a symlink directory.
700 audit_path = scmutil.pathauditor(self._root)
703 audit_path = scmutil.pathauditor(self._root)
701
704
702 for nf in iter(visit):
705 for nf in iter(visit):
703 # Report ignored items in the dmap as long as they are not
706 # Report ignored items in the dmap as long as they are not
704 # under a symlink directory.
707 # under a symlink directory.
705 if audit_path.check(nf):
708 if audit_path.check(nf):
706 try:
709 try:
707 results[nf] = lstat(join(nf))
710 results[nf] = lstat(join(nf))
708 except OSError:
711 except OSError:
709 # file doesn't exist
712 # file doesn't exist
710 results[nf] = None
713 results[nf] = None
711 else:
714 else:
712 # It's either missing or under a symlink directory
715 # It's either missing or under a symlink directory
713 results[nf] = None
716 results[nf] = None
714 else:
717 else:
715 # We may not have walked the full directory tree above,
718 # We may not have walked the full directory tree above,
716 # so stat everything we missed.
719 # so stat everything we missed.
717 nf = iter(visit).next
720 nf = iter(visit).next
718 for st in util.statfiles([join(i) for i in visit]):
721 for st in util.statfiles([join(i) for i in visit]):
719 results[nf()] = st
722 results[nf()] = st
720 return results
723 return results
721
724
722 def status(self, match, subrepos, ignored, clean, unknown):
725 def status(self, match, subrepos, ignored, clean, unknown):
723 '''Determine the status of the working copy relative to the
726 '''Determine the status of the working copy relative to the
724 dirstate and return a tuple of lists (unsure, modified, added,
727 dirstate and return a tuple of lists (unsure, modified, added,
725 removed, deleted, unknown, ignored, clean), where:
728 removed, deleted, unknown, ignored, clean), where:
726
729
727 unsure:
730 unsure:
728 files that might have been modified since the dirstate was
731 files that might have been modified since the dirstate was
729 written, but need to be read to be sure (size is the same
732 written, but need to be read to be sure (size is the same
730 but mtime differs)
733 but mtime differs)
731 modified:
734 modified:
732 files that have definitely been modified since the dirstate
735 files that have definitely been modified since the dirstate
733 was written (different size or mode)
736 was written (different size or mode)
734 added:
737 added:
735 files that have been explicitly added with hg add
738 files that have been explicitly added with hg add
736 removed:
739 removed:
737 files that have been explicitly removed with hg remove
740 files that have been explicitly removed with hg remove
738 deleted:
741 deleted:
739 files that have been deleted through other means ("missing")
742 files that have been deleted through other means ("missing")
740 unknown:
743 unknown:
741 files not in the dirstate that are not ignored
744 files not in the dirstate that are not ignored
742 ignored:
745 ignored:
743 files not in the dirstate that are ignored
746 files not in the dirstate that are ignored
744 (by _dirignore())
747 (by _dirignore())
745 clean:
748 clean:
746 files that have definitely not been modified since the
749 files that have definitely not been modified since the
747 dirstate was written
750 dirstate was written
748 '''
751 '''
749 listignored, listclean, listunknown = ignored, clean, unknown
752 listignored, listclean, listunknown = ignored, clean, unknown
750 lookup, modified, added, unknown, ignored = [], [], [], [], []
753 lookup, modified, added, unknown, ignored = [], [], [], [], []
751 removed, deleted, clean = [], [], []
754 removed, deleted, clean = [], [], []
752
755
753 dmap = self._map
756 dmap = self._map
754 ladd = lookup.append # aka "unsure"
757 ladd = lookup.append # aka "unsure"
755 madd = modified.append
758 madd = modified.append
756 aadd = added.append
759 aadd = added.append
757 uadd = unknown.append
760 uadd = unknown.append
758 iadd = ignored.append
761 iadd = ignored.append
759 radd = removed.append
762 radd = removed.append
760 dadd = deleted.append
763 dadd = deleted.append
761 cadd = clean.append
764 cadd = clean.append
762 mexact = match.exact
765 mexact = match.exact
763 dirignore = self._dirignore
766 dirignore = self._dirignore
764 checkexec = self._checkexec
767 checkexec = self._checkexec
765 checklink = self._checklink
768 checklink = self._checklink
766 copymap = self._copymap
769 copymap = self._copymap
767 lastnormaltime = self._lastnormaltime
770 lastnormaltime = self._lastnormaltime
768
771
769 lnkkind = stat.S_IFLNK
772 lnkkind = stat.S_IFLNK
770
773
771 for fn, st in self.walk(match, subrepos, listunknown,
774 for fn, st in self.walk(match, subrepos, listunknown,
772 listignored).iteritems():
775 listignored).iteritems():
773 if fn not in dmap:
776 if fn not in dmap:
774 if (listignored or mexact(fn)) and dirignore(fn):
777 if (listignored or mexact(fn)) and dirignore(fn):
775 if listignored:
778 if listignored:
776 iadd(fn)
779 iadd(fn)
777 elif listunknown:
780 elif listunknown:
778 uadd(fn)
781 uadd(fn)
779 continue
782 continue
780
783
781 state, mode, size, time = dmap[fn]
784 state, mode, size, time = dmap[fn]
782
785
783 if not st and state in "nma":
786 if not st and state in "nma":
784 dadd(fn)
787 dadd(fn)
785 elif state == 'n':
788 elif state == 'n':
786 # The "mode & lnkkind != lnkkind or self._checklink"
789 # The "mode & lnkkind != lnkkind or self._checklink"
787 # lines are an expansion of "islink => checklink"
790 # lines are an expansion of "islink => checklink"
788 # where islink means "is this a link?" and checklink
791 # where islink means "is this a link?" and checklink
789 # means "can we check links?".
792 # means "can we check links?".
790 mtime = int(st.st_mtime)
793 mtime = int(st.st_mtime)
791 if (size >= 0 and
794 if (size >= 0 and
792 ((size != st.st_size and size != st.st_size & _rangemask)
795 ((size != st.st_size and size != st.st_size & _rangemask)
793 or ((mode ^ st.st_mode) & 0100 and checkexec))
796 or ((mode ^ st.st_mode) & 0100 and checkexec))
794 and (mode & lnkkind != lnkkind or checklink)
797 and (mode & lnkkind != lnkkind or checklink)
795 or size == -2 # other parent
798 or size == -2 # other parent
796 or fn in copymap):
799 or fn in copymap):
797 madd(fn)
800 madd(fn)
798 elif ((time != mtime and time != mtime & _rangemask)
801 elif ((time != mtime and time != mtime & _rangemask)
799 and (mode & lnkkind != lnkkind or checklink)):
802 and (mode & lnkkind != lnkkind or checklink)):
800 ladd(fn)
803 ladd(fn)
801 elif mtime == lastnormaltime:
804 elif mtime == lastnormaltime:
802 # fn may have been changed in the same timeslot without
805 # fn may have been changed in the same timeslot without
803 # changing its size. This can happen if we quickly do
806 # changing its size. This can happen if we quickly do
804 # multiple commits in a single transaction.
807 # multiple commits in a single transaction.
805 # Force lookup, so we don't miss such a racy file change.
808 # Force lookup, so we don't miss such a racy file change.
806 ladd(fn)
809 ladd(fn)
807 elif listclean:
810 elif listclean:
808 cadd(fn)
811 cadd(fn)
809 elif state == 'm':
812 elif state == 'm':
810 madd(fn)
813 madd(fn)
811 elif state == 'a':
814 elif state == 'a':
812 aadd(fn)
815 aadd(fn)
813 elif state == 'r':
816 elif state == 'r':
814 radd(fn)
817 radd(fn)
815
818
816 return (lookup, modified, added, removed, deleted, unknown, ignored,
819 return (lookup, modified, added, removed, deleted, unknown, ignored,
817 clean)
820 clean)
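The dirstate hunks above apply the same None check to the cached matchedir/matchtdir callbacks inside walk(). For orientation, the status() method documented above is typically driven roughly as follows; 'repo' and the matcher 'm' are assumed to exist and are not part of the changeset:

    # Hedged usage sketch of dirstate.status() as defined above.
    st = repo.dirstate.status(m, subrepos=[], ignored=False,
                              clean=False, unknown=True)
    lookup, modified, added, removed, deleted, unknown, ignored, clean = st
    # 'lookup' is the "unsure" bucket from the docstring: same size,
    # different mtime, so the file contents must be read to decide.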
@@ -1,356 +1,354 @@
1 # match.py - filename matching
1 # match.py - filename matching
2 #
2 #
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import re
8 import re
9 import scmutil, util, fileset
9 import scmutil, util, fileset
10 from i18n import _
10 from i18n import _
11
11
12 def _rematcher(pat):
12 def _rematcher(pat):
13 m = util.compilere(pat)
13 m = util.compilere(pat)
14 try:
14 try:
15 # slightly faster, provided by facebook's re2 bindings
15 # slightly faster, provided by facebook's re2 bindings
16 return m.test_match
16 return m.test_match
17 except AttributeError:
17 except AttributeError:
18 return m.match
18 return m.match
19
19
20 def _expandsets(pats, ctx):
20 def _expandsets(pats, ctx):
21 '''convert set: patterns into a list of files in the given context'''
21 '''convert set: patterns into a list of files in the given context'''
22 fset = set()
22 fset = set()
23 other = []
23 other = []
24
24
25 for kind, expr in pats:
25 for kind, expr in pats:
26 if kind == 'set':
26 if kind == 'set':
27 if not ctx:
27 if not ctx:
28 raise util.Abort("fileset expression with no context")
28 raise util.Abort("fileset expression with no context")
29 s = fileset.getfileset(ctx, expr)
29 s = fileset.getfileset(ctx, expr)
30 fset.update(s)
30 fset.update(s)
31 continue
31 continue
32 other.append((kind, expr))
32 other.append((kind, expr))
33 return fset, other
33 return fset, other
34
34
35 class match(object):
35 class match(object):
36 def __init__(self, root, cwd, patterns, include=[], exclude=[],
36 def __init__(self, root, cwd, patterns, include=[], exclude=[],
37 default='glob', exact=False, auditor=None, ctx=None):
37 default='glob', exact=False, auditor=None, ctx=None):
38 """build an object to match a set of file patterns
38 """build an object to match a set of file patterns
39
39
40 arguments:
40 arguments:
41 root - the canonical root of the tree you're matching against
41 root - the canonical root of the tree you're matching against
42 cwd - the current working directory, if relevant
42 cwd - the current working directory, if relevant
43 patterns - patterns to find
43 patterns - patterns to find
44 include - patterns to include
44 include - patterns to include
45 exclude - patterns to exclude
45 exclude - patterns to exclude
46 default - if a pattern in names has no explicit type, assume this one
46 default - if a pattern in names has no explicit type, assume this one
47 exact - patterns are actually literals
47 exact - patterns are actually literals
48
48
49 a pattern is one of:
49 a pattern is one of:
50 'glob:<glob>' - a glob relative to cwd
50 'glob:<glob>' - a glob relative to cwd
51 're:<regexp>' - a regular expression
51 're:<regexp>' - a regular expression
52 'path:<path>' - a path relative to repository root
52 'path:<path>' - a path relative to repository root
53 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
53 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
54 'relpath:<path>' - a path relative to cwd
54 'relpath:<path>' - a path relative to cwd
55 'relre:<regexp>' - a regexp that needn't match the start of a name
55 'relre:<regexp>' - a regexp that needn't match the start of a name
56 'set:<fileset>' - a fileset expression
56 'set:<fileset>' - a fileset expression
57 '<something>' - a pattern of the specified default type
57 '<something>' - a pattern of the specified default type
58 """
58 """
59
59
60 self._root = root
60 self._root = root
61 self._cwd = cwd
61 self._cwd = cwd
62 self._files = []
62 self._files = []
63 self._anypats = bool(include or exclude)
63 self._anypats = bool(include or exclude)
64 self._ctx = ctx
64 self._ctx = ctx
65 self._always = False
65 self._always = False
66
66
67 if include:
67 if include:
68 pats = _normalize(include, 'glob', root, cwd, auditor)
68 pats = _normalize(include, 'glob', root, cwd, auditor)
69 self.includepat, im = _buildmatch(ctx, pats, '(?:/|$)')
69 self.includepat, im = _buildmatch(ctx, pats, '(?:/|$)')
70 if exclude:
70 if exclude:
71 pats = _normalize(exclude, 'glob', root, cwd, auditor)
71 pats = _normalize(exclude, 'glob', root, cwd, auditor)
72 self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
72 self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
73 if exact:
73 if exact:
74 if isinstance(patterns, list):
74 if isinstance(patterns, list):
75 self._files = patterns
75 self._files = patterns
76 else:
76 else:
77 self._files = list(patterns)
77 self._files = list(patterns)
78 pm = self.exact
78 pm = self.exact
79 elif patterns:
79 elif patterns:
80 pats = _normalize(patterns, default, root, cwd, auditor)
80 pats = _normalize(patterns, default, root, cwd, auditor)
81 self._files = _roots(pats)
81 self._files = _roots(pats)
82 self._anypats = self._anypats or _anypats(pats)
82 self._anypats = self._anypats or _anypats(pats)
83 self.patternspat, pm = _buildmatch(ctx, pats, '$')
83 self.patternspat, pm = _buildmatch(ctx, pats, '$')
84
84
85 if patterns or exact:
85 if patterns or exact:
86 if include:
86 if include:
87 if exclude:
87 if exclude:
88 m = lambda f: im(f) and not em(f) and pm(f)
88 m = lambda f: im(f) and not em(f) and pm(f)
89 else:
89 else:
90 m = lambda f: im(f) and pm(f)
90 m = lambda f: im(f) and pm(f)
91 else:
91 else:
92 if exclude:
92 if exclude:
93 m = lambda f: not em(f) and pm(f)
93 m = lambda f: not em(f) and pm(f)
94 else:
94 else:
95 m = pm
95 m = pm
96 else:
96 else:
97 if include:
97 if include:
98 if exclude:
98 if exclude:
99 m = lambda f: im(f) and not em(f)
99 m = lambda f: im(f) and not em(f)
100 else:
100 else:
101 m = im
101 m = im
102 else:
102 else:
103 if exclude:
103 if exclude:
104 m = lambda f: not em(f)
104 m = lambda f: not em(f)
105 else:
105 else:
106 m = lambda f: True
106 m = lambda f: True
107 self._always = True
107 self._always = True
108
108
109 self.matchfn = m
109 self.matchfn = m
110 self._fmap = set(self._files)
110 self._fmap = set(self._files)
111
111
112 def __call__(self, fn):
112 def __call__(self, fn):
113 return self.matchfn(fn)
113 return self.matchfn(fn)
114 def __iter__(self):
114 def __iter__(self):
115 for f in self._files:
115 for f in self._files:
116 yield f
116 yield f
117 def bad(self, f, msg):
117 def bad(self, f, msg):
118 '''callback for each explicit file that can't be
118 '''callback for each explicit file that can't be
119 found/accessed, with an error message
119 found/accessed, with an error message
120 '''
120 '''
121 pass
121 pass
122 def explicitdir(self, f):
122 explicitdir = None
123 pass
123 traversedir = None
124 def traversedir(self, f):
125 pass
126 def missing(self, f):
124 def missing(self, f):
127 pass
125 pass
128 def exact(self, f):
126 def exact(self, f):
129 return f in self._fmap
127 return f in self._fmap
130 def rel(self, f):
128 def rel(self, f):
131 return util.pathto(self._root, self._cwd, f)
129 return util.pathto(self._root, self._cwd, f)
132 def files(self):
130 def files(self):
133 return self._files
131 return self._files
134 def anypats(self):
132 def anypats(self):
135 return self._anypats
133 return self._anypats
136 def always(self):
134 def always(self):
137 return self._always
135 return self._always
138
136
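With explicitdir and traversedir now plain class attributes that default to None, a directory walker can test for a callback instead of paying for a no-op method call on every directory it visits. A minimal sketch of the calling pattern this enables; report_dirs and dirnames are invented for illustration.

def report_dirs(m, dirnames):
    # Bind once; both attributes stay None unless a caller installed a callback.
    explicitdir, traversedir = m.explicitdir, m.traversedir
    for d in dirnames:
        if explicitdir:
            explicitdir(d)
        if traversedir:
            traversedir(d)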
139 class exact(match):
137 class exact(match):
140 def __init__(self, root, cwd, files):
138 def __init__(self, root, cwd, files):
141 match.__init__(self, root, cwd, files, exact = True)
139 match.__init__(self, root, cwd, files, exact = True)
142
140
143 class always(match):
141 class always(match):
144 def __init__(self, root, cwd):
142 def __init__(self, root, cwd):
145 match.__init__(self, root, cwd, [])
143 match.__init__(self, root, cwd, [])
146 self._always = True
144 self._always = True
147
145
148 class narrowmatcher(match):
146 class narrowmatcher(match):
149 """Adapt a matcher to work on a subdirectory only.
147 """Adapt a matcher to work on a subdirectory only.
150
148
151 The paths are remapped to remove/insert the path as needed:
149 The paths are remapped to remove/insert the path as needed:
152
150
153 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
151 >>> m1 = match('root', '', ['a.txt', 'sub/b.txt'])
154 >>> m2 = narrowmatcher('sub', m1)
152 >>> m2 = narrowmatcher('sub', m1)
155 >>> bool(m2('a.txt'))
153 >>> bool(m2('a.txt'))
156 False
154 False
157 >>> bool(m2('b.txt'))
155 >>> bool(m2('b.txt'))
158 True
156 True
159 >>> bool(m2.matchfn('a.txt'))
157 >>> bool(m2.matchfn('a.txt'))
160 False
158 False
161 >>> bool(m2.matchfn('b.txt'))
159 >>> bool(m2.matchfn('b.txt'))
162 True
160 True
163 >>> m2.files()
161 >>> m2.files()
164 ['b.txt']
162 ['b.txt']
165 >>> m2.exact('b.txt')
163 >>> m2.exact('b.txt')
166 True
164 True
167 >>> m2.rel('b.txt')
165 >>> m2.rel('b.txt')
168 'b.txt'
166 'b.txt'
169 >>> def bad(f, msg):
167 >>> def bad(f, msg):
170 ... print "%s: %s" % (f, msg)
168 ... print "%s: %s" % (f, msg)
171 >>> m1.bad = bad
169 >>> m1.bad = bad
172 >>> m2.bad('x.txt', 'No such file')
170 >>> m2.bad('x.txt', 'No such file')
173 sub/x.txt: No such file
171 sub/x.txt: No such file
174 """
172 """
175
173
176 def __init__(self, path, matcher):
174 def __init__(self, path, matcher):
177 self._root = matcher._root
175 self._root = matcher._root
178 self._cwd = matcher._cwd
176 self._cwd = matcher._cwd
179 self._path = path
177 self._path = path
180 self._matcher = matcher
178 self._matcher = matcher
181 self._always = matcher._always
179 self._always = matcher._always
182
180
183 self._files = [f[len(path) + 1:] for f in matcher._files
181 self._files = [f[len(path) + 1:] for f in matcher._files
184 if f.startswith(path + "/")]
182 if f.startswith(path + "/")]
185 self._anypats = matcher._anypats
183 self._anypats = matcher._anypats
186 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
184 self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn)
187 self._fmap = set(self._files)
185 self._fmap = set(self._files)
188
186
189 def bad(self, f, msg):
187 def bad(self, f, msg):
190 self._matcher.bad(self._path + "/" + f, msg)
188 self._matcher.bad(self._path + "/" + f, msg)
191
189
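The exact and always subclasses are the two degenerate matchers: always accepts every path, while exact accepts only the listed files. A quick hedged check, assuming this module is importable as mercurial.match (later Mercurial releases reworked this API):

from mercurial import match as matchmod

m = matchmod.always('/repo', '')
assert m('any/file.txt') and m.always()

m = matchmod.exact('/repo', '', ['a.txt', 'sub/b.txt'])
assert m('a.txt') and not m('c.txt')
assert m.files() == ['a.txt', 'sub/b.txt']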
192 def patkind(pat):
190 def patkind(pat):
193 return _patsplit(pat, None)[0]
191 return _patsplit(pat, None)[0]
194
192
195 def _patsplit(pat, default):
193 def _patsplit(pat, default):
196 """Split a string into an optional pattern kind prefix and the
194 """Split a string into an optional pattern kind prefix and the
197 actual pattern."""
195 actual pattern."""
198 if ':' in pat:
196 if ':' in pat:
199 kind, val = pat.split(':', 1)
197 kind, val = pat.split(':', 1)
200 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
198 if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre',
201 'listfile', 'listfile0', 'set'):
199 'listfile', 'listfile0', 'set'):
202 return kind, val
200 return kind, val
203 return default, pat
201 return default, pat
204
202
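_patsplit only strips the prefix when it names one of the known kinds; any other "word:" prefix is left intact and the supplied default kind applies. A small hedged check with invented patterns, again assuming the module is importable as mercurial.match:

from mercurial import match as matchmod

assert matchmod._patsplit('re:.*\\.c$', 'glob') == ('re', '.*\\.c$')
assert matchmod._patsplit('relglob:*.c', 'glob') == ('relglob', '*.c')
# 'c' is not a known kind, so the colon stays and the default kind is used
assert matchmod._patsplit('c:/tmp', 'glob') == ('glob', 'c:/tmp')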
205 def _globre(pat):
203 def _globre(pat):
206 "convert a glob pattern into a regexp"
204 "convert a glob pattern into a regexp"
207 i, n = 0, len(pat)
205 i, n = 0, len(pat)
208 res = ''
206 res = ''
209 group = 0
207 group = 0
210 escape = re.escape
208 escape = re.escape
211 def peek():
209 def peek():
212 return i < n and pat[i]
210 return i < n and pat[i]
213 while i < n:
211 while i < n:
214 c = pat[i]
212 c = pat[i]
215 i += 1
213 i += 1
216 if c not in '*?[{},\\':
214 if c not in '*?[{},\\':
217 res += escape(c)
215 res += escape(c)
218 elif c == '*':
216 elif c == '*':
219 if peek() == '*':
217 if peek() == '*':
220 i += 1
218 i += 1
221 res += '.*'
219 res += '.*'
222 else:
220 else:
223 res += '[^/]*'
221 res += '[^/]*'
224 elif c == '?':
222 elif c == '?':
225 res += '.'
223 res += '.'
226 elif c == '[':
224 elif c == '[':
227 j = i
225 j = i
228 if j < n and pat[j] in '!]':
226 if j < n and pat[j] in '!]':
229 j += 1
227 j += 1
230 while j < n and pat[j] != ']':
228 while j < n and pat[j] != ']':
231 j += 1
229 j += 1
232 if j >= n:
230 if j >= n:
233 res += '\\['
231 res += '\\['
234 else:
232 else:
235 stuff = pat[i:j].replace('\\','\\\\')
233 stuff = pat[i:j].replace('\\','\\\\')
236 i = j + 1
234 i = j + 1
237 if stuff[0] == '!':
235 if stuff[0] == '!':
238 stuff = '^' + stuff[1:]
236 stuff = '^' + stuff[1:]
239 elif stuff[0] == '^':
237 elif stuff[0] == '^':
240 stuff = '\\' + stuff
238 stuff = '\\' + stuff
241 res = '%s[%s]' % (res, stuff)
239 res = '%s[%s]' % (res, stuff)
242 elif c == '{':
240 elif c == '{':
243 group += 1
241 group += 1
244 res += '(?:'
242 res += '(?:'
245 elif c == '}' and group:
243 elif c == '}' and group:
246 res += ')'
244 res += ')'
247 group -= 1
245 group -= 1
248 elif c == ',' and group:
246 elif c == ',' and group:
249 res += '|'
247 res += '|'
250 elif c == '\\':
248 elif c == '\\':
251 p = peek()
249 p = peek()
252 if p:
250 if p:
253 i += 1
251 i += 1
254 res += escape(p)
252 res += escape(p)
255 else:
253 else:
256 res += escape(c)
254 res += escape(c)
257 else:
255 else:
258 res += escape(c)
256 res += escape(c)
259 return res
257 return res
260
258
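_globre implements the usual rules of this glob dialect: '*' stops at a slash, '**' crosses directories, and '{a,b}' becomes an alternation group. A hedged sanity check with invented patterns:

import re
from mercurial import match as matchmod

pat = matchmod._globre('src/*.c') + '$'
assert re.match(pat, 'src/main.c')
assert not re.match(pat, 'src/sub/main.c')    # '*' does not cross '/'

pat = matchmod._globre('src/**/*.c') + '$'
assert re.match(pat, 'src/deep/nested/main.c')

pat = matchmod._globre('doc/{index,readme}.txt') + '$'
assert re.match(pat, 'doc/readme.txt')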
261 def _regex(kind, name, tail):
259 def _regex(kind, name, tail):
262 '''convert a pattern into a regular expression'''
260 '''convert a pattern into a regular expression'''
263 if not name:
261 if not name:
264 return ''
262 return ''
265 if kind == 're':
263 if kind == 're':
266 return name
264 return name
267 elif kind == 'path':
265 elif kind == 'path':
268 return '^' + re.escape(name) + '(?:/|$)'
266 return '^' + re.escape(name) + '(?:/|$)'
269 elif kind == 'relglob':
267 elif kind == 'relglob':
270 return '(?:|.*/)' + _globre(name) + tail
268 return '(?:|.*/)' + _globre(name) + tail
271 elif kind == 'relpath':
269 elif kind == 'relpath':
272 return re.escape(name) + '(?:/|$)'
270 return re.escape(name) + '(?:/|$)'
273 elif kind == 'relre':
271 elif kind == 'relre':
274 if name.startswith('^'):
272 if name.startswith('^'):
275 return name
273 return name
276 return '.*' + name
274 return '.*' + name
277 return _globre(name) + tail
275 return _globre(name) + tail
278
276
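The '(?:/|$)' suffix that 'path:' and 'relpath:' patterns receive is what makes a directory pattern match the directory itself and everything below it, but not a sibling that merely shares the prefix. A hedged check (note that 'path' hard-codes this suffix and ignores the tail argument):

import re
from mercurial import match as matchmod

pat = matchmod._regex('path', 'docs', '(?:/|$)')
assert pat == '^docs(?:/|$)'
assert re.match(pat, 'docs')
assert re.match(pat, 'docs/conf.py')
assert not re.match(pat, 'docs2/notes.txt')   # shares the prefix only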
279 def _buildmatch(ctx, pats, tail):
277 def _buildmatch(ctx, pats, tail):
280 fset, pats = _expandsets(pats, ctx)
278 fset, pats = _expandsets(pats, ctx)
281 if not pats:
279 if not pats:
282 return "", fset.__contains__
280 return "", fset.__contains__
283
281
284 pat, mf = _buildregexmatch(pats, tail)
282 pat, mf = _buildregexmatch(pats, tail)
285 if fset:
283 if fset:
286 return pat, lambda f: f in fset or mf(f)
284 return pat, lambda f: f in fset or mf(f)
287 return pat, mf
285 return pat, mf
288
286
289 def _buildregexmatch(pats, tail):
287 def _buildregexmatch(pats, tail):
290 """build a matching function from a set of patterns"""
288 """build a matching function from a set of patterns"""
291 try:
289 try:
292 pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
290 pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
293 if len(pat) > 20000:
291 if len(pat) > 20000:
294 raise OverflowError
292 raise OverflowError
295 return pat, _rematcher(pat)
293 return pat, _rematcher(pat)
296 except OverflowError:
294 except OverflowError:
297 # We're using a Python with a tiny regex engine and we
295 # We're using a Python with a tiny regex engine and we
298 # made it explode, so we'll divide the pattern list in two
296 # made it explode, so we'll divide the pattern list in two
299 # until it works
297 # until it works
300 l = len(pats)
298 l = len(pats)
301 if l < 2:
299 if l < 2:
302 raise
300 raise
303 pata, a = _buildregexmatch(pats[:l//2], tail)
301 pata, a = _buildregexmatch(pats[:l//2], tail)
304 patb, b = _buildregexmatch(pats[l//2:], tail)
302 patb, b = _buildregexmatch(pats[l//2:], tail)
305 return pat, lambda s: a(s) or b(s)
303 return pat, lambda s: a(s) or b(s)
306 except re.error:
304 except re.error:
307 for k, p in pats:
305 for k, p in pats:
308 try:
306 try:
309 _rematcher('(?:%s)' % _regex(k, p, tail))
307 _rematcher('(?:%s)' % _regex(k, p, tail))
310 except re.error:
308 except re.error:
311 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
309 raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
312 raise util.Abort(_("invalid pattern"))
310 raise util.Abort(_("invalid pattern"))
313
311
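_buildmatch glues the per-pattern regexes into one alternation and returns both the pattern text and a matcher callable; the OverflowError branch above exists only to split that alternation in two when a small regex engine refuses it. A hedged end-to-end check with invented patterns:

from mercurial import match as matchmod

pat, mf = matchmod._buildmatch(None, [('glob', '*.py'), ('re', '^tests/')], '$')
assert mf('setup.py')
assert mf('tests/test-match.py')
assert not mf('README')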
314 def _normalize(names, default, root, cwd, auditor):
312 def _normalize(names, default, root, cwd, auditor):
315 pats = []
313 pats = []
316 for kind, name in [_patsplit(p, default) for p in names]:
314 for kind, name in [_patsplit(p, default) for p in names]:
317 if kind in ('glob', 'relpath'):
315 if kind in ('glob', 'relpath'):
318 name = scmutil.canonpath(root, cwd, name, auditor)
316 name = scmutil.canonpath(root, cwd, name, auditor)
319 elif kind in ('relglob', 'path'):
317 elif kind in ('relglob', 'path'):
320 name = util.normpath(name)
318 name = util.normpath(name)
321 elif kind in ('listfile', 'listfile0'):
319 elif kind in ('listfile', 'listfile0'):
322 try:
320 try:
323 files = util.readfile(name)
321 files = util.readfile(name)
324 if kind == 'listfile0':
322 if kind == 'listfile0':
325 files = files.split('\0')
323 files = files.split('\0')
326 else:
324 else:
327 files = files.splitlines()
325 files = files.splitlines()
328 files = [f for f in files if f]
326 files = [f for f in files if f]
329 except EnvironmentError:
327 except EnvironmentError:
330 raise util.Abort(_("unable to read file list (%s)") % name)
328 raise util.Abort(_("unable to read file list (%s)") % name)
331 pats += _normalize(files, default, root, cwd, auditor)
329 pats += _normalize(files, default, root, cwd, auditor)
332 continue
330 continue
333
331
334 pats.append((kind, name))
332 pats.append((kind, name))
335 return pats
333 return pats
336
334
337 def _roots(patterns):
335 def _roots(patterns):
338 r = []
336 r = []
339 for kind, name in patterns:
337 for kind, name in patterns:
340 if kind == 'glob': # find the non-glob prefix
338 if kind == 'glob': # find the non-glob prefix
341 root = []
339 root = []
342 for p in name.split('/'):
340 for p in name.split('/'):
343 if '[' in p or '{' in p or '*' in p or '?' in p:
341 if '[' in p or '{' in p or '*' in p or '?' in p:
344 break
342 break
345 root.append(p)
343 root.append(p)
346 r.append('/'.join(root) or '.')
344 r.append('/'.join(root) or '.')
347 elif kind in ('relpath', 'path'):
345 elif kind in ('relpath', 'path'):
348 r.append(name or '.')
346 r.append(name or '.')
349 else: # relglob, re, relre
347 else: # relglob, re, relre
350 r.append('.')
348 r.append('.')
351 return r
349 return r
352
350
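_roots computes the root paths a walker would need to visit for each pattern: the non-glob prefix of a glob, the path itself for 'path' and 'relpath', and '.' for anything that can match anywhere. A hedged check:

from mercurial import match as matchmod

assert matchmod._roots([('glob', 'src/*.c')]) == ['src']
assert matchmod._roots([('glob', '*.c')]) == ['.']
assert matchmod._roots([('path', 'docs'), ('relglob', '*.c')]) == ['docs', '.']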
353 def _anypats(patterns):
351 def _anypats(patterns):
354 for kind, name in patterns:
352 for kind, name in patterns:
355 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
353 if kind in ('glob', 're', 'relglob', 'relre', 'set'):
356 return True
354 return True