##// END OF EJS Templates
interfaces: introduce an interface for dirstate implementations...
Augie Fackler -
r43197:d459cd8e default
parent child Browse files
Show More
@@ -0,0 +1,257 b''
1 from __future__ import absolute_import, print_function
2
3 import contextlib
4
5 from .. import (
6 node as nodemod,
7 )
8
9 from . import (
10 util as interfaceutil,
11 )
12
class idirstate(interfaceutil.Interface):
    """Interface that dirstate implementations must provide.

    The dirstate tracks the state of files in the working directory
    relative to the working directory's parent changeset(s).
    """

    def __init__(opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.
        '''

    @contextlib.contextmanager
    def parentchange():
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''

    def pendingparentchange():
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''

    _map = interfaceutil.Attribute(
        """Return the dirstate contents (see documentation for dirstatemap).

        TODO this should not be exposed.
        """
    )

    def hasdir(d):
        """Return whether directory `d` is tracked in the dirstate."""
        pass

    _ignore = interfaceutil.Attribute('Matcher for ignored files.')

    _checklink = interfaceutil.Attribute('Callable for checking symlinks.')
    _checkexec = interfaceutil.Attribute('Callable for checking exec bits.')

    def flagfunc(buildfallback):
        """Return a function mapping a path to its 'l'/'x'/'' flags.

        `buildfallback` returns a function used for flags the
        filesystem cannot express (no symlink or exec-bit support).
        """
        pass

    def getcwd():
        '''Return the path from which a canonical path is calculated.

        This path should be used to resolve file patterns or to convert
        canonical paths back to file paths for display. It shouldn't be
        used to get real file paths. Use vfs functions instead.
        '''

    def pathto(f, cwd=None):
        """Return `f` as a path suitable for display relative to `cwd`
        (which defaults to `getcwd()`)."""
        pass

    def __getitem__(key):
        '''Return the current state of key (a filename) in the dirstate.

        States are:
        n  normal
        m  needs merging
        r  marked for removal
        a  marked for addition
        ?  not tracked
        '''

    def __contains__(key):
        """Check if bytestring `key` is known to the dirstate."""

    def __iter__():
        """Iterate the dirstate's contained filenames as bytestrings."""

    def items():
        """Iterate the dirstate's entries as (filename, dirstatetuple).

        As usual, filename is a bytestring.
        """

    # Python 2 spelling kept as a compatibility alias
    iteritems = items

    def parents():
        """Return the (validated) parents of the working directory."""
        pass

    def p1():
        """Return the validated first parent of the working directory."""
        pass

    def p2():
        """Return the validated second parent of the working directory."""
        pass

    def branch():
        """Return the current branch name, in local encoding."""
        pass

    def setparents(p1, p2=nodemod.nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """

    def setbranch(branch):
        """Set the working copy's branch to `branch`."""
        pass

    def invalidate():
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

    def copy(source, dest):
        """Mark dest as a copy of source. Unmark dest if source is None."""

    def copied(file):
        """Return the source `file` was recorded as copied from, or None."""
        pass

    def copies():
        """Return the mapping of copy destination -> copy source."""
        pass

    def normal(f, parentfiledata=None):
        '''Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode,
        size), as or close as possible from the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now.'''
        pass

    def normallookup(f):
        '''Mark a file normal, but possibly dirty.'''

    def otherparent(f):
        '''Mark as coming from the other parent, always dirty.'''

    def add(f):
        '''Mark a file added.'''

    def remove(f):
        '''Mark a file removed.'''

    def merge(f):
        '''Mark a file merged.'''

    def drop(f):
        '''Drop a file from the dirstate'''

    def normalize(path, isknown=False, ignoremissing=False):
        '''
        normalize the case of a pathname when on a casefolding filesystem

        isknown specifies whether the filename came from walking the
        disk, to avoid extra filesystem access.

        If ignoremissing is True, missing path are returned
        unchanged. Otherwise, we try harder to normalize possibly
        existing path components.

        The normalized case is determined based on the following precedence:

        - version of name already stored in the dirstate
        - version of name stored on disk
        - version provided via command arguments
        '''

    def clear():
        """Empty the dirstate's contents."""
        pass

    def rebuild(parent, allfiles, changedfiles=None):
        """Rebuild the dirstate against revision `parent` for `allfiles`
        (restricted to `changedfiles` when given — confirm semantics
        against concrete implementations)."""
        pass

    def identity():
        '''Return identity of the dirstate itself, to detect changes in storage

        If identity of previous dirstate is equal to this, writing
        changes based on the former dirstate out can keep consistency.
        '''

    def write(tr):
        """Write pending dirstate changes; `tr` is the active transaction,
        if any (TODO confirm exact `tr` semantics)."""
        pass

    def addparentchangecallback(category, callback):
        """add a callback to be called when the wd parents are changed

        Callback will be called with the following arguments:
            dirstate, (oldp1, oldp2), (newp1, newp2)

        Category is a unique identifier to allow overwriting an old callback
        with a newer callback.
        """

    def _ignorefiles():
        """Return a list of files containing patterns to ignore.

        TODO this should not be exposed."""

    def _ignorefileandline(f):
        """Given a file `f`, return the ignore file and line that ignores it.

        TODO this should not be exposed."""

    def walk(match, subrepos, unknown, ignored, full=True):
        '''
        Walk recursively through the directory tree, finding all files
        matched by match.

        If full is False, maybe skip some known-clean files.

        Return a dict mapping filename to stat-like object (either
        mercurial.osutil.stat instance or return value of os.stat()).

        '''

    def status(match, subrepos, ignored, clean, unknown):
        '''Determine the status of the working copy relative to the
        dirstate and return a pair of (unsure, status), where status is of type
        scmutil.status and:

          unsure:
            files that might have been modified since the dirstate was
            written, but need to be read to be sure (size is the same
            but mtime differs)
          status.modified:
            files that have definitely been modified since the dirstate
            was written (different size or mode)
          status.clean:
            files that have definitely not been modified since the
            dirstate was written
        '''

    def matches(match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''

    def savebackup(tr, backupname):
        '''Save current dirstate into backup file'''

    def restorebackup(tr, backupname):
        '''Restore dirstate by backup file'''

    def clearbackup(tr, backupname):
        '''Clear backup file'''
@@ -1,1708 +1,1714 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import contextlib
11 import contextlib
12 import errno
12 import errno
13 import os
13 import os
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import nullid
17 from .node import nullid
18 from . import (
18 from . import (
19 encoding,
19 encoding,
20 error,
20 error,
21 match as matchmod,
21 match as matchmod,
22 pathutil,
22 pathutil,
23 policy,
23 policy,
24 pycompat,
24 pycompat,
25 scmutil,
25 scmutil,
26 txnutil,
26 txnutil,
27 util,
27 util,
28 )
28 )
29
29
30 from .interfaces import (
31 dirstate as intdirstate,
32 util as interfaceutil,
33 )
34
# Implementation modules resolved via the policy module (may select an
# accelerated C implementation; rustmod is presumably None when the Rust
# extensions are unavailable — see the policy module).
parsers = policy.importmod(r'parsers')
rustmod = policy.importrust(r'dirstate')

# local aliases for frequently used decorators
propertycache = util.propertycache
filecache = scmutil.filecache
# 31-bit mask applied to file sizes and mtimes before they are recorded
# (see dirstate.normal), keeping values within the storable range
_rangemask = 0x7fffffff

dirstatetuple = parsers.dirstatetuple
38
43
class repocache(filecache):
    """filecache for files in .hg/"""
    def join(self, obj, fname):
        # resolve fname through the dirstate's opener, i.e. relative
        # to the repository's .hg directory
        return obj._opener.join(fname)
43
48
class rootcache(filecache):
    """filecache for files in the repository root"""
    def join(self, obj, fname):
        # resolve fname relative to the working directory root
        return obj._join(fname)
48
53
49 def _getfsnow(vfs):
54 def _getfsnow(vfs):
50 '''Get "now" timestamp on filesystem'''
55 '''Get "now" timestamp on filesystem'''
51 tmpfd, tmpname = vfs.mkstemp()
56 tmpfd, tmpname = vfs.mkstemp()
52 try:
57 try:
53 return os.fstat(tmpfd)[stat.ST_MTIME]
58 return os.fstat(tmpfd)[stat.ST_MTIME]
54 finally:
59 finally:
55 os.close(tmpfd)
60 os.close(tmpfd)
56 vfs.unlink(tmpname)
61 vfs.unlink(tmpname)
57
62
63 @interfaceutil.implementer(intdirstate.idirstate)
58 class dirstate(object):
64 class dirstate(object):
59
65
    def __init__(self, opener, ui, root, validate, sparsematchfn):
        '''Create a new dirstate object.

        opener is an open()-like callable that can be used to open the
        dirstate file; root is the root of the directory tracked by
        the dirstate.

        validate is a callable applied to parent nodes before they are
        returned (see parents()/p1()/p2()); sparsematchfn is a callable
        returning the matcher for the sparse checkout.
        '''
        self._opener = opener
        self._validate = validate
        self._root = root
        self._sparsematchfn = sparsematchfn
        # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is
        # UNC path pointing to root share (issue4557)
        self._rootdir = pathutil.normasprefix(root)
        # True while the in-memory state differs from what is on disk
        self._dirty = False
        # mtime of the most recently normal()-ed file, consulted by status
        self._lastnormaltime = 0
        self._ui = ui
        self._filecache = {}
        # nesting depth of active parentchange() context managers
        self._parentwriters = 0
        self._filename = 'dirstate'
        self._pendingfilename = '%s.pending' % self._filename
        # category -> callback invoked on parent changes
        # (see addparentchangecallback on the interface)
        self._plchangecallbacks = {}
        # parents before the first setparents() call, or None
        self._origpl = None
        # filenames touched since the last write (see _addpath/copy)
        self._updatedfiles = set()
        self._mapcls = dirstatemap
        # Access and cache cwd early, so we don't access it for the first time
        # after a working-copy update caused it to not exist (accessing it then
        # raises an exception).
        self._cwd
89
95
    @contextlib.contextmanager
    def parentchange(self):
        '''Context manager for handling dirstate parents.

        If an exception occurs in the scope of the context manager,
        the incoherent dirstate won't be written when wlock is
        released.
        '''
        # a counter rather than a flag, so nested uses remain safe
        self._parentwriters += 1
        yield
        # Typically we want the "undo" step of a context manager in a
        # finally block so it happens even when an exception
        # occurs. In this case, however, we only want to decrement
        # parentwriters if the code in the with statement exits
        # normally, so we don't have a try/finally here on purpose.
        self._parentwriters -= 1
106
112
    def pendingparentchange(self):
        '''Returns true if the dirstate is in the middle of a set of changes
        that modify the dirstate parent.
        '''
        # non-zero while at least one parentchange() scope is active
        return self._parentwriters > 0
112
118
    @propertycache
    def _map(self):
        """Return the dirstate contents (see documentation for dirstatemap)."""
        # the explicit assignment replaces the propertycache slot on
        # first access so later reads are plain attribute lookups
        self._map = self._mapcls(self._ui, self._opener, self._root)
        return self._map
118
124
    @property
    def _sparsematcher(self):
        """The matcher for the sparse checkout.

        The working directory may not include every file from a manifest. The
        matcher obtained by this property will match a path if it is to be
        included in the working directory.
        """
        # TODO there is potential to cache this property. For now, the matcher
        # is resolved on every access. (But the called function does use a
        # cache to keep the lookup fast.)
        # NOTE: self._sparsematchfn is supplied at construction time.
        return self._sparsematchfn()
131
137
    @repocache('branch')
    def _branch(self):
        """Raw branch name read from .hg/branch ('default' if unset)."""
        # the branch file may legitimately be absent (fresh repository):
        # fall back to 'default' then, but propagate any other I/O error
        try:
            return self._opener.read("branch").strip() or "default"
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            return "default"
140
146
    @property
    def _pl(self):
        # (p1, p2) parent nodes exactly as stored in the dirstate map,
        # i.e. without the validation applied by parents()/p1()/p2()
        return self._map.parents()
144
150
    def hasdir(self, d):
        """Return whether `d` is a tracked directory (per the dirstate map)."""
        return self._map.hastrackeddir(d)
147
153
    @rootcache('.hgignore')
    def _ignore(self):
        """Matcher selecting the files ignored per the ignore file(s)."""
        files = self._ignorefiles()
        if not files:
            # no ignore files at all: nothing is ever ignored
            return matchmod.never()

        # each ignore file contributes an 'include:' pattern
        pats = ['include:%s' % f for f in files]
        return matchmod.match(self._root, '', [], pats, warn=self._ui.warn)
156
162
    @propertycache
    def _slash(self):
        # True when paths should be displayed with '/' although the OS
        # separator differs (controlled by the ui.slash option)
        return self._ui.configbool('ui', 'slash') and pycompat.ossep != '/'
160
166
    @propertycache
    def _checklink(self):
        # whether the filesystem at the repository root supports symlinks
        return util.checklink(self._root)
164
170
    @propertycache
    def _checkexec(self):
        # whether the filesystem at the repository root supports exec bits
        return util.checkexec(self._root)
168
174
    @propertycache
    def _checkcase(self):
        # True on case-insensitive (casefolding) filesystems; probed
        # against the always-present '.hg' directory
        return not util.fscasesensitive(self._join('.hg'))
172
178
    def _join(self, f):
        """Return the absolute filesystem path of tracked file `f`."""
        # much faster than os.path.join()
        # it's safe because f is always a relative path
        return self._rootdir + f
177
183
    def flagfunc(self, buildfallback):
        """Return a callable mapping a tracked path to its flags.

        The returned function yields 'l' for symlinks, 'x' for
        executables and '' otherwise.  When the filesystem cannot
        express one of these (no symlink or exec-bit support),
        `buildfallback()` supplies a function used to answer for the
        unsupported flag.
        """
        if self._checklink and self._checkexec:
            # the filesystem supports both: answer from lstat alone
            def f(x):
                try:
                    st = os.lstat(self._join(x))
                    if util.statislink(st):
                        return 'l'
                    if util.statisexec(st):
                        return 'x'
                except OSError:
                    pass
                return ''
            return f

        fallback = buildfallback()
        if self._checklink:
            # symlinks work but exec bits don't: ask the fallback for 'x'
            def f(x):
                if os.path.islink(self._join(x)):
                    return 'l'
                if 'x' in fallback(x):
                    return 'x'
                return ''
            return f
        if self._checkexec:
            # exec bits work but symlinks don't: ask the fallback for 'l'
            def f(x):
                if 'l' in fallback(x):
                    return 'l'
                if util.isexec(self._join(x)):
                    return 'x'
                return ''
            return f
        else:
            # neither is supported: rely entirely on the fallback
            return fallback
211
217
    @propertycache
    def _cwd(self):
        """Current working directory, honouring the ui.forcecwd override."""
        # internal config: ui.forcecwd
        forcecwd = self._ui.config('ui', 'forcecwd')
        if forcecwd:
            return forcecwd
        return encoding.getcwd()
219
225
220 def getcwd(self):
226 def getcwd(self):
221 '''Return the path from which a canonical path is calculated.
227 '''Return the path from which a canonical path is calculated.
222
228
223 This path should be used to resolve file patterns or to convert
229 This path should be used to resolve file patterns or to convert
224 canonical paths back to file paths for display. It shouldn't be
230 canonical paths back to file paths for display. It shouldn't be
225 used to get real file paths. Use vfs functions instead.
231 used to get real file paths. Use vfs functions instead.
226 '''
232 '''
227 cwd = self._cwd
233 cwd = self._cwd
228 if cwd == self._root:
234 if cwd == self._root:
229 return ''
235 return ''
230 # self._root ends with a path separator if self._root is '/' or 'C:\'
236 # self._root ends with a path separator if self._root is '/' or 'C:\'
231 rootsep = self._root
237 rootsep = self._root
232 if not util.endswithsep(rootsep):
238 if not util.endswithsep(rootsep):
233 rootsep += pycompat.ossep
239 rootsep += pycompat.ossep
234 if cwd.startswith(rootsep):
240 if cwd.startswith(rootsep):
235 return cwd[len(rootsep):]
241 return cwd[len(rootsep):]
236 else:
242 else:
237 # we're outside the repo. return an absolute path.
243 # we're outside the repo. return an absolute path.
238 return cwd
244 return cwd
239
245
240 def pathto(self, f, cwd=None):
246 def pathto(self, f, cwd=None):
241 if cwd is None:
247 if cwd is None:
242 cwd = self.getcwd()
248 cwd = self.getcwd()
243 path = util.pathto(self._root, cwd, f)
249 path = util.pathto(self._root, cwd, f)
244 if self._slash:
250 if self._slash:
245 return util.pconvert(path)
251 return util.pconvert(path)
246 return path
252 return path
247
253
248 def __getitem__(self, key):
254 def __getitem__(self, key):
249 '''Return the current state of key (a filename) in the dirstate.
255 '''Return the current state of key (a filename) in the dirstate.
250
256
251 States are:
257 States are:
252 n normal
258 n normal
253 m needs merging
259 m needs merging
254 r marked for removal
260 r marked for removal
255 a marked for addition
261 a marked for addition
256 ? not tracked
262 ? not tracked
257 '''
263 '''
258 return self._map.get(key, ("?",))[0]
264 return self._map.get(key, ("?",))[0]
259
265
    def __contains__(self, key):
        """Check if bytestring `key` is known to the dirstate."""
        return key in self._map
262
268
    def __iter__(self):
        """Iterate the dirstate's contained filenames as bytestrings.

        Filenames are yielded in sorted order.
        """
        return iter(sorted(self._map))
265
271
    def items(self):
        """Iterate the dirstate's entries as (filename, dirstatetuple).

        As usual, filename is a bytestring.
        """
        return self._map.iteritems()

    # Python 2 spelling kept as a compatibility alias
    iteritems = items
270
276
    def parents(self):
        """Return the validated parent nodes of the working directory."""
        return [self._validate(p) for p in self._pl]
273
279
    def p1(self):
        """Return the validated first parent of the working directory."""
        return self._validate(self._pl[0])
276
282
    def p2(self):
        """Return the validated second parent of the working directory."""
        return self._validate(self._pl[1])
279
285
    def branch(self):
        """Return the current branch name, in local encoding."""
        return encoding.tolocal(self._branch)
282
288
    def setparents(self, p1, p2=nullid):
        """Set dirstate parents to p1 and p2.

        When moving from two parents to one, 'm' merged entries are
        adjusted to normal and previous copy records discarded and
        returned by the call.

        See localrepo.setparents()
        """
        if self._parentwriters == 0:
            raise ValueError("cannot set dirstate parent outside of "
                             "dirstate.parentchange context manager")

        self._dirty = True
        oldp2 = self._pl[1]
        if self._origpl is None:
            # remember the pre-change parents (only the first change)
            self._origpl = self._pl
        self._map.setparents(p1, p2)
        copies = {}
        if oldp2 != nullid and p2 == nullid:
            # leaving a two-parent (merge) state: only nonnormal and
            # otherparent entries can carry markers needing fixup
            candidatefiles = self._map.nonnormalset.union(
                self._map.otherparentset)
            for f in candidatefiles:
                s = self._map.get(f)
                if s is None:
                    continue

                # Discard 'm' markers when moving away from a merge state
                if s[0] == 'm':
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.normallookup(f)
                # Also fix up otherparent markers
                elif s[0] == 'n' and s[2] == -2:
                    source = self._map.copymap.get(f)
                    if source:
                        copies[f] = source
                    self.add(f)
        return copies
323
329
    def setbranch(self, branch):
        """Persist `branch` (given in local encoding) as the current branch."""
        self.__class__._branch.set(self, encoding.fromlocal(branch))
        # atomictemp+checkambig so a partial write never clobbers the file
        f = self._opener('branch', 'w', atomictemp=True, checkambig=True)
        try:
            f.write(self._branch + '\n')
            f.close()

            # make sure filecache has the correct stat info for _branch after
            # replacing the underlying file
            ce = self._filecache['_branch']
            if ce:
                ce.refresh()
        except: # re-raises
            f.discard()
            raise
339
345
    def invalidate(self):
        '''Causes the next access to reread the dirstate.

        This is different from localrepo.invalidatedirstate() because it always
        rereads the dirstate. Use localrepo.invalidatedirstate() if you want to
        check whether the dirstate has changed before rereading it.'''

        # drop cached properties so they are recomputed on next access
        for a in (r"_map", r"_branch", r"_ignore"):
            if a in self.__dict__:
                delattr(self, a)
        # reset all transient in-memory state
        self._lastnormaltime = 0
        self._dirty = False
        self._updatedfiles.clear()
        self._parentwriters = 0
        self._origpl = None
355
361
356 def copy(self, source, dest):
362 def copy(self, source, dest):
357 """Mark dest as a copy of source. Unmark dest if source is None."""
363 """Mark dest as a copy of source. Unmark dest if source is None."""
358 if source == dest:
364 if source == dest:
359 return
365 return
360 self._dirty = True
366 self._dirty = True
361 if source is not None:
367 if source is not None:
362 self._map.copymap[dest] = source
368 self._map.copymap[dest] = source
363 self._updatedfiles.add(source)
369 self._updatedfiles.add(source)
364 self._updatedfiles.add(dest)
370 self._updatedfiles.add(dest)
365 elif self._map.copymap.pop(dest, None):
371 elif self._map.copymap.pop(dest, None):
366 self._updatedfiles.add(dest)
372 self._updatedfiles.add(dest)
367
373
    def copied(self, file):
        """Return the source `file` was recorded as copied from, or None."""
        return self._map.copymap.get(file, None)
370
376
    def copies(self):
        """Return the mapping of copy destination -> copy source."""
        return self._map.copymap
373
379
    def _addpath(self, f, state, mode, size, mtime):
        """Record file `f` in the dirstate map with the given state/stat data.

        Raises error.Abort when `f` clashes with a tracked directory of
        the same name, or when one of its parent directories clashes
        with a tracked file.
        """
        oldstate = self[f]
        if state == 'a' or oldstate == 'r':
            # adding a new name (or re-adding a removed one): validate it
            scmutil.checkfilename(f)
            if self._map.hastrackeddir(f):
                raise error.Abort(_('directory %r already in dirstate') %
                                  pycompat.bytestr(f))
            # shadows
            for d in util.finddirs(f):
                if self._map.hastrackeddir(d):
                    break
                entry = self._map.get(d)
                if entry is not None and entry[0] != 'r':
                    raise error.Abort(
                        _('file %r in dirstate clashes with %r') %
                        (pycompat.bytestr(d), pycompat.bytestr(f)))
        self._dirty = True
        self._updatedfiles.add(f)
        self._map.addfile(f, oldstate, state, mode, size, mtime)
393
399
    def normal(self, f, parentfiledata=None):
        '''Mark a file normal and clean.

        parentfiledata: (mode, size, mtime) of the clean file

        parentfiledata should be computed from memory (for mode,
        size), as or close as possible from the point where we
        determined the file was clean, to limit the risk of the
        file having been changed by an external process between the
        moment where the file was determined to be clean and now.'''
        if parentfiledata:
            (mode, size, mtime) = parentfiledata
        else:
            # no caller-provided stat data: read it from the filesystem
            s = os.lstat(self._join(f))
            mode = s.st_mode
            size = s.st_size
            mtime = s[stat.ST_MTIME]
        # mask size/mtime to keep them within the storable range
        self._addpath(f, 'n', mode, size & _rangemask, mtime & _rangemask)
        self._map.copymap.pop(f, None)
        if f in self._map.nonnormalset:
            self._map.nonnormalset.remove(f)
        if mtime > self._lastnormaltime:
            # Remember the most recent modification timeslot for status(),
            # to make sure we won't miss future size-preserving file content
            # modifications that happen within the same timeslot.
            self._lastnormaltime = mtime
420
426
    def normallookup(self, f):
        '''Mark a file normal, but possibly dirty.'''
        if self._pl[1] != nullid:
            # if there is a merge going on and the file was either
            # in state 'm' (-1) or coming from other parent (-2) before
            # being removed, restore that state.
            entry = self._map.get(f)
            if entry is not None:
                if entry[0] == 'r' and entry[2] in (-1, -2):
                    source = self._map.copymap.get(f)
                    if entry[2] == -1:
                        self.merge(f)
                    elif entry[2] == -2:
                        self.otherparent(f)
                    if source:
                        # re-record the copy that the removal had dropped
                        self.copy(source, f)
                    return
                if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
                    # already merged / from the other parent: nothing to do
                    return
        # size/mtime of -1 leave the entry unverified, so the next
        # status() will re-examine the file (see the interface's status doc)
        self._addpath(f, 'n', 0, -1, -1)
        self._map.copymap.pop(f, None)
442
448
    def otherparent(self, f):
        '''Mark as coming from the other parent, always dirty.

        Only valid during a merge (when a second parent exists).
        '''
        if self._pl[1] == nullid:
            # outside of a merge there is no "other parent" to come from
            raise error.Abort(_("setting %r to other parent "
                                "only allowed in merges") % f)
        if f in self and self[f] == 'n':
            # merge-like: already tracked as normal, record as merged;
            # size -2 encodes the other-parent origin
            self._addpath(f, 'm', 0, -2, -1)
        else:
            # add-like: track as normal, size -2 still marks the origin
            self._addpath(f, 'n', 0, -2, -1)
        self._map.copymap.pop(f, None)
455
461
456 def add(self, f):
462 def add(self, f):
457 '''Mark a file added.'''
463 '''Mark a file added.'''
458 self._addpath(f, 'a', 0, -1, -1)
464 self._addpath(f, 'a', 0, -1, -1)
459 self._map.copymap.pop(f, None)
465 self._map.copymap.pop(f, None)
460
466
    def remove(self, f):
        '''Mark a file removed.

        During a merge, the previous state is encoded in the size field
        (-1 for merged, -2 for other-parent) so a later normallookup()
        can restore it.
        '''
        self._dirty = True
        oldstate = self[f]
        size = 0
        if self._pl[1] != nullid:
            entry = self._map.get(f)
            if entry is not None:
                # backup the previous state
                if entry[0] == 'm': # merge
                    size = -1
                elif entry[0] == 'n' and entry[2] == -2: # other parent
                    size = -2
                    self._map.otherparentset.add(f)
        self._updatedfiles.add(f)
        self._map.removefile(f, oldstate, size)
        if size == 0:
            # plain removal with no merge state to remember: the copy
            # record is no longer relevant
            self._map.copymap.pop(f, None)
479
485
480 def merge(self, f):
486 def merge(self, f):
481 '''Mark a file merged.'''
487 '''Mark a file merged.'''
482 if self._pl[1] == nullid:
488 if self._pl[1] == nullid:
483 return self.normallookup(f)
489 return self.normallookup(f)
484 return self.otherparent(f)
490 return self.otherparent(f)
485
491
486 def drop(self, f):
492 def drop(self, f):
487 '''Drop a file from the dirstate'''
493 '''Drop a file from the dirstate'''
488 oldstate = self[f]
494 oldstate = self[f]
489 if self._map.dropfile(f, oldstate):
495 if self._map.dropfile(f, oldstate):
490 self._dirty = True
496 self._dirty = True
491 self._updatedfiles.add(f)
497 self._updatedfiles.add(f)
492 self._map.copymap.pop(f, None)
498 self._map.copymap.pop(f, None)
493
499
    def _discoverpath(self, path, normed, ignoremissing, exists, storemap):
        """Determine the canonical case of *path* on a casefolding filesystem.

        normed is the case-normalized form of path.  storemap is the fold
        cache (a file- or dir-foldmap) in which the discovered result is
        recorded — but only when the path actually exists on disk.
        exists may be supplied by the caller to save an lstat; when None,
        existence is checked here.
        """
        if exists is None:
            exists = os.path.lexists(os.path.join(self._root, path))
        if not exists:
            # Maybe a path component exists
            if not ignoremissing and '/' in path:
                # normalize the existing leading directories and keep the
                # missing last component as given
                d, f = path.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, None)
                folded = d + "/" + f
            else:
                # No path components, preserve original case
                folded = path
        else:
            # recursively normalize leading directory components
            # against dirstate
            if '/' in normed:
                d, f = normed.rsplit('/', 1)
                d = self._normalize(d, False, ignoremissing, True)
                r = self._root + "/" + d
                folded = d + "/" + util.fspath(f, r)
            else:
                folded = util.fspath(normed, self._root)
            # cache only results for paths that exist on disk
            storemap[normed] = folded

        return folded
519
525
520 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
526 def _normalizefile(self, path, isknown, ignoremissing=False, exists=None):
521 normed = util.normcase(path)
527 normed = util.normcase(path)
522 folded = self._map.filefoldmap.get(normed, None)
528 folded = self._map.filefoldmap.get(normed, None)
523 if folded is None:
529 if folded is None:
524 if isknown:
530 if isknown:
525 folded = path
531 folded = path
526 else:
532 else:
527 folded = self._discoverpath(path, normed, ignoremissing, exists,
533 folded = self._discoverpath(path, normed, ignoremissing, exists,
528 self._map.filefoldmap)
534 self._map.filefoldmap)
529 return folded
535 return folded
530
536
531 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
537 def _normalize(self, path, isknown, ignoremissing=False, exists=None):
532 normed = util.normcase(path)
538 normed = util.normcase(path)
533 folded = self._map.filefoldmap.get(normed, None)
539 folded = self._map.filefoldmap.get(normed, None)
534 if folded is None:
540 if folded is None:
535 folded = self._map.dirfoldmap.get(normed, None)
541 folded = self._map.dirfoldmap.get(normed, None)
536 if folded is None:
542 if folded is None:
537 if isknown:
543 if isknown:
538 folded = path
544 folded = path
539 else:
545 else:
540 # store discovered result in dirfoldmap so that future
546 # store discovered result in dirfoldmap so that future
541 # normalizefile calls don't start matching directories
547 # normalizefile calls don't start matching directories
542 folded = self._discoverpath(path, normed, ignoremissing, exists,
548 folded = self._discoverpath(path, normed, ignoremissing, exists,
543 self._map.dirfoldmap)
549 self._map.dirfoldmap)
544 return folded
550 return folded
545
551
546 def normalize(self, path, isknown=False, ignoremissing=False):
552 def normalize(self, path, isknown=False, ignoremissing=False):
547 '''
553 '''
548 normalize the case of a pathname when on a casefolding filesystem
554 normalize the case of a pathname when on a casefolding filesystem
549
555
550 isknown specifies whether the filename came from walking the
556 isknown specifies whether the filename came from walking the
551 disk, to avoid extra filesystem access.
557 disk, to avoid extra filesystem access.
552
558
553 If ignoremissing is True, missing path are returned
559 If ignoremissing is True, missing path are returned
554 unchanged. Otherwise, we try harder to normalize possibly
560 unchanged. Otherwise, we try harder to normalize possibly
555 existing path components.
561 existing path components.
556
562
557 The normalized case is determined based on the following precedence:
563 The normalized case is determined based on the following precedence:
558
564
559 - version of name already stored in the dirstate
565 - version of name already stored in the dirstate
560 - version of name stored on disk
566 - version of name stored on disk
561 - version provided via command arguments
567 - version provided via command arguments
562 '''
568 '''
563
569
564 if self._checkcase:
570 if self._checkcase:
565 return self._normalize(path, isknown, ignoremissing)
571 return self._normalize(path, isknown, ignoremissing)
566 return path
572 return path
567
573
568 def clear(self):
574 def clear(self):
569 self._map.clear()
575 self._map.clear()
570 self._lastnormaltime = 0
576 self._lastnormaltime = 0
571 self._updatedfiles.clear()
577 self._updatedfiles.clear()
572 self._dirty = True
578 self._dirty = True
573
579
    def rebuild(self, parent, allfiles, changedfiles=None):
        """Rebuild dirstate entries against a single parent revision.

        Files in changedfiles that exist in allfiles are re-marked for
        lookup; the rest are dropped.  With changedfiles=None the whole
        dirstate is rebuilt from allfiles.
        """
        if changedfiles is None:
            # Rebuild entire dirstate
            changedfiles = allfiles
            # clear() resets the mtime watermark; preserve it across the
            # full rebuild
            lastnormaltime = self._lastnormaltime
            self.clear()
            self._lastnormaltime = lastnormaltime

        if self._origpl is None:
            # remember the pre-change parents for the parent-change
            # callbacks fired at write time
            self._origpl = self._pl
        self._map.setparents(parent, nullid)
        for f in changedfiles:
            if f in allfiles:
                self.normallookup(f)
            else:
                self.drop(f)

        self._dirty = True
592
598
593 def identity(self):
599 def identity(self):
594 '''Return identity of dirstate itself to detect changing in storage
600 '''Return identity of dirstate itself to detect changing in storage
595
601
596 If identity of previous dirstate is equal to this, writing
602 If identity of previous dirstate is equal to this, writing
597 changes based on the former dirstate out can keep consistency.
603 changes based on the former dirstate out can keep consistency.
598 '''
604 '''
599 return self._map.identity
605 return self._map.identity
600
606
    def write(self, tr):
        """Write the dirstate out if it has pending changes.

        With a transaction *tr*, the write is delayed and registered as a
        file generator on the transaction; ambiguous timestamps are
        cleared eagerly so the delayed write stays correct.  Without a
        transaction, the dirstate file is written immediately and
        atomically.
        """
        if not self._dirty:
            return

        filename = self._filename
        if tr:
            # 'dirstate.write()' is not only for writing in-memory
            # changes out, but also for dropping ambiguous timestamp.
            # delayed writing re-raise "ambiguous timestamp issue".
            # See also the wiki page below for detail:
            # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan

            # emulate dropping timestamp in 'parsers.pack_dirstate'
            now = _getfsnow(self._opener)
            self._map.clearambiguoustimes(self._updatedfiles, now)

            # emulate that all 'dirstate.normal' results are written out
            self._lastnormaltime = 0
            self._updatedfiles.clear()

            # delay writing in-memory changes out
            tr.addfilegenerator('dirstate', (self._filename,),
                                self._writedirstate, location='plain')
            return

        st = self._opener(filename, "w", atomictemp=True, checkambig=True)
        self._writedirstate(st)
628
634
629 def addparentchangecallback(self, category, callback):
635 def addparentchangecallback(self, category, callback):
630 """add a callback to be called when the wd parents are changed
636 """add a callback to be called when the wd parents are changed
631
637
632 Callback will be called with the following arguments:
638 Callback will be called with the following arguments:
633 dirstate, (oldp1, oldp2), (newp1, newp2)
639 dirstate, (oldp1, oldp2), (newp1, newp2)
634
640
635 Category is a unique identifier to allow overwriting an old callback
641 Category is a unique identifier to allow overwriting an old callback
636 with a newer callback.
642 with a newer callback.
637 """
643 """
638 self._plchangecallbacks[category] = callback
644 self._plchangecallbacks[category] = callback
639
645
    def _writedirstate(self, st):
        """Serialize the dirstate map into the open file object *st*.

        Fires parent-change callbacks, then optionally sleeps
        (debug.dirstate.delaywrite) so no entry shares the filesystem's
        notion of 'now', and finally writes the map.
        """
        # notify callbacks about parents change
        if self._origpl is not None and self._origpl != self._pl:
            for c, callback in sorted(self._plchangecallbacks.iteritems()):
                callback(self, self._origpl, self._pl)
            self._origpl = None
        # use the modification time of the newly created temporary file as the
        # filesystem's notion of 'now'
        now = util.fstat(st)[stat.ST_MTIME] & _rangemask

        # enough 'delaywrite' prevents 'pack_dirstate' from dropping
        # timestamp of each entries in dirstate, because of 'now > mtime'
        delaywrite = self._ui.configint('debug', 'dirstate.delaywrite')
        if delaywrite > 0:
            # do we have any files to delay for?
            items = self._map.iteritems()
            for f, e in items:
                # entry layout: (state, mode, size, mtime)
                if e[0] == 'n' and e[3] == now:
                    import time # to avoid useless import
                    # rather than sleep n seconds, sleep until the next
                    # multiple of n seconds
                    clock = time.time()
                    start = int(clock) - (int(clock) % delaywrite)
                    end = start + delaywrite
                    time.sleep(end - clock)
                    now = end # trust our estimate that the end is near now
                    break
            # since the iterator is potentially not deleted,
            # delete the iterator to release the reference for the Rust
            # implementation.
            # TODO make the Rust implementation behave like Python
            # since this would not work with a non ref-counting GC.
            del items

        self._map.write(st, now)
        self._lastnormaltime = 0
        self._dirty = False
677
683
678 def _dirignore(self, f):
684 def _dirignore(self, f):
679 if self._ignore(f):
685 if self._ignore(f):
680 return True
686 return True
681 for p in util.finddirs(f):
687 for p in util.finddirs(f):
682 if self._ignore(p):
688 if self._ignore(p):
683 return True
689 return True
684 return False
690 return False
685
691
686 def _ignorefiles(self):
692 def _ignorefiles(self):
687 files = []
693 files = []
688 if os.path.exists(self._join('.hgignore')):
694 if os.path.exists(self._join('.hgignore')):
689 files.append(self._join('.hgignore'))
695 files.append(self._join('.hgignore'))
690 for name, path in self._ui.configitems("ui"):
696 for name, path in self._ui.configitems("ui"):
691 if name == 'ignore' or name.startswith('ignore.'):
697 if name == 'ignore' or name.startswith('ignore.'):
692 # we need to use os.path.join here rather than self._join
698 # we need to use os.path.join here rather than self._join
693 # because path is arbitrary and user-specified
699 # because path is arbitrary and user-specified
694 files.append(os.path.join(self._rootdir, util.expandpath(path)))
700 files.append(os.path.join(self._rootdir, util.expandpath(path)))
695 return files
701 return files
696
702
    def _ignorefileandline(self, f):
        """Return (ignorefile, lineno, line) for the pattern ignoring f.

        Walks every ignore file — including subincluded ones, breadth
        first — and returns the first pattern matching f.  Returns
        (None, -1, "") when nothing ignores f.
        """
        files = collections.deque(self._ignorefiles())
        visited = set()
        while files:
            i = files.popleft()
            patterns = matchmod.readpatternfile(i, self._ui.warn,
                                                sourceinfo=True)
            for pattern, lineno, line in patterns:
                kind, p = matchmod._patsplit(pattern, 'glob')
                if kind == "subinclude":
                    # queue the nested ignore file rather than matching it
                    # directly; visited guards against include cycles
                    if p not in visited:
                        files.append(p)
                    continue
                m = matchmod.match(self._root, '', [], [pattern],
                                   warn=self._ui.warn)
                if m(f):
                    return (i, lineno, line)
            visited.add(i)
        return (None, -1, "")
716
722
    def _walkexplicit(self, match, subrepos):
        '''Get stat data about the files explicitly specified by match.

        Return a triple (results, dirsfound, dirsnotfound).
        - results is a mapping from filename to stat result. It also contains
          listings mapping subrepos and .hg to None.
        - dirsfound is a list of files found to be directories.
        - dirsnotfound is a list of files that the dirstate thinks are
          directories and that were not found.'''

        def badtype(mode):
            # human-readable description for an unwalkable file type
            kind = _('unknown')
            if stat.S_ISCHR(mode):
                kind = _('character device')
            elif stat.S_ISBLK(mode):
                kind = _('block device')
            elif stat.S_ISFIFO(mode):
                kind = _('fifo')
            elif stat.S_ISSOCK(mode):
                kind = _('socket')
            elif stat.S_ISDIR(mode):
                kind = _('directory')
            return _('unsupported file type (type is %s)') % kind

        # hoist frequently used attributes/functions to locals for the loop
        matchedir = match.explicitdir
        badfn = match.bad
        dmap = self._map
        lstat = os.lstat
        getkind = stat.S_IFMT
        dirkind = stat.S_IFDIR
        regkind = stat.S_IFREG
        lnkkind = stat.S_IFLNK
        join = self._join
        dirsfound = []
        foundadd = dirsfound.append
        dirsnotfound = []
        notfoundadd = dirsnotfound.append

        if not match.isexact() and self._checkcase:
            normalize = self._normalize
        else:
            normalize = None

        # drop explicit files that live inside a subrepo; both lists are
        # sorted so a single merge-style pass suffices
        files = sorted(match.files())
        subrepos.sort()
        i, j = 0, 0
        while i < len(files) and j < len(subrepos):
            subpath = subrepos[j] + "/"
            if files[i] < subpath:
                i += 1
                continue
            while i < len(files) and files[i].startswith(subpath):
                del files[i]
            j += 1

        if not files or '' in files:
            files = ['']
            # constructing the foldmap is expensive, so don't do it for the
            # common case where files is ['']
            normalize = None
        results = dict.fromkeys(subrepos)
        results['.hg'] = None

        for ff in files:
            if normalize:
                nf = normalize(ff, False, True)
            else:
                nf = ff
            if nf in results:
                continue

            try:
                st = lstat(join(nf))
                kind = getkind(st.st_mode)
                if kind == dirkind:
                    if nf in dmap:
                        # file replaced by dir on disk but still in dirstate
                        results[nf] = None
                    if matchedir:
                        matchedir(nf)
                    foundadd((nf, ff))
                elif kind == regkind or kind == lnkkind:
                    results[nf] = st
                else:
                    badfn(ff, badtype(kind))
                    if nf in dmap:
                        results[nf] = None
            except OSError as inst: # nf not found on disk - it is dirstate only
                if nf in dmap: # does it exactly match a missing file?
                    results[nf] = None
                else: # does it match a missing directory?
                    if self._map.hasdir(nf):
                        if matchedir:
                            matchedir(nf)
                        notfoundadd(nf)
                    else:
                        badfn(ff, encoding.strtolocal(inst.strerror))

        # match.files() may contain explicitly-specified paths that shouldn't
        # be taken; drop them from the list of files found. dirsfound/notfound
        # aren't filtered here because they will be tested later.
        if match.anypats():
            for f in list(results):
                if f == '.hg' or f in subrepos:
                    # keep sentinel to disable further out-of-repo walks
                    continue
                if not match(f):
                    del results[f]

        # Case insensitive filesystems cannot rely on lstat() failing to detect
        # a case-only rename. Prune the stat object for any file that does not
        # match the case in the filesystem, if there are multiple files that
        # normalize to the same path.
        if match.isexact() and self._checkcase:
            normed = {}

            # group the found files by their case-normalized form
            for f, st in results.iteritems():
                if st is None:
                    continue

                nc = util.normcase(f)
                paths = normed.get(nc)

                if paths is None:
                    paths = set()
                    normed[nc] = paths

                paths.add(f)

            # for ambiguous groups, keep only the spelling the disk uses
            for norm, paths in normed.iteritems():
                if len(paths) > 1:
                    for path in paths:
                        folded = self._discoverpath(path, norm, True, None,
                                                    self._map.dirfoldmap)
                        if path != folded:
                            results[path] = None

        return results, dirsfound, dirsnotfound
855
861
856 def walk(self, match, subrepos, unknown, ignored, full=True):
862 def walk(self, match, subrepos, unknown, ignored, full=True):
857 '''
863 '''
858 Walk recursively through the directory tree, finding all files
864 Walk recursively through the directory tree, finding all files
859 matched by match.
865 matched by match.
860
866
861 If full is False, maybe skip some known-clean files.
867 If full is False, maybe skip some known-clean files.
862
868
863 Return a dict mapping filename to stat-like object (either
869 Return a dict mapping filename to stat-like object (either
864 mercurial.osutil.stat instance or return value of os.stat()).
870 mercurial.osutil.stat instance or return value of os.stat()).
865
871
866 '''
872 '''
867 # full is a flag that extensions that hook into walk can use -- this
873 # full is a flag that extensions that hook into walk can use -- this
868 # implementation doesn't use it at all. This satisfies the contract
874 # implementation doesn't use it at all. This satisfies the contract
869 # because we only guarantee a "maybe".
875 # because we only guarantee a "maybe".
870
876
871 if ignored:
877 if ignored:
872 ignore = util.never
878 ignore = util.never
873 dirignore = util.never
879 dirignore = util.never
874 elif unknown:
880 elif unknown:
875 ignore = self._ignore
881 ignore = self._ignore
876 dirignore = self._dirignore
882 dirignore = self._dirignore
877 else:
883 else:
878 # if not unknown and not ignored, drop dir recursion and step 2
884 # if not unknown and not ignored, drop dir recursion and step 2
879 ignore = util.always
885 ignore = util.always
880 dirignore = util.always
886 dirignore = util.always
881
887
882 matchfn = match.matchfn
888 matchfn = match.matchfn
883 matchalways = match.always()
889 matchalways = match.always()
884 matchtdir = match.traversedir
890 matchtdir = match.traversedir
885 dmap = self._map
891 dmap = self._map
886 listdir = util.listdir
892 listdir = util.listdir
887 lstat = os.lstat
893 lstat = os.lstat
888 dirkind = stat.S_IFDIR
894 dirkind = stat.S_IFDIR
889 regkind = stat.S_IFREG
895 regkind = stat.S_IFREG
890 lnkkind = stat.S_IFLNK
896 lnkkind = stat.S_IFLNK
891 join = self._join
897 join = self._join
892
898
893 exact = skipstep3 = False
899 exact = skipstep3 = False
894 if match.isexact(): # match.exact
900 if match.isexact(): # match.exact
895 exact = True
901 exact = True
896 dirignore = util.always # skip step 2
902 dirignore = util.always # skip step 2
897 elif match.prefix(): # match.match, no patterns
903 elif match.prefix(): # match.match, no patterns
898 skipstep3 = True
904 skipstep3 = True
899
905
900 if not exact and self._checkcase:
906 if not exact and self._checkcase:
901 normalize = self._normalize
907 normalize = self._normalize
902 normalizefile = self._normalizefile
908 normalizefile = self._normalizefile
903 skipstep3 = False
909 skipstep3 = False
904 else:
910 else:
905 normalize = self._normalize
911 normalize = self._normalize
906 normalizefile = None
912 normalizefile = None
907
913
908 # step 1: find all explicit files
914 # step 1: find all explicit files
909 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
915 results, work, dirsnotfound = self._walkexplicit(match, subrepos)
910
916
911 skipstep3 = skipstep3 and not (work or dirsnotfound)
917 skipstep3 = skipstep3 and not (work or dirsnotfound)
912 work = [d for d in work if not dirignore(d[0])]
918 work = [d for d in work if not dirignore(d[0])]
913
919
914 # step 2: visit subdirectories
920 # step 2: visit subdirectories
915 def traverse(work, alreadynormed):
921 def traverse(work, alreadynormed):
916 wadd = work.append
922 wadd = work.append
917 while work:
923 while work:
918 nd = work.pop()
924 nd = work.pop()
919 visitentries = match.visitchildrenset(nd)
925 visitentries = match.visitchildrenset(nd)
920 if not visitentries:
926 if not visitentries:
921 continue
927 continue
922 if visitentries == 'this' or visitentries == 'all':
928 if visitentries == 'this' or visitentries == 'all':
923 visitentries = None
929 visitentries = None
924 skip = None
930 skip = None
925 if nd != '':
931 if nd != '':
926 skip = '.hg'
932 skip = '.hg'
927 try:
933 try:
928 entries = listdir(join(nd), stat=True, skip=skip)
934 entries = listdir(join(nd), stat=True, skip=skip)
929 except OSError as inst:
935 except OSError as inst:
930 if inst.errno in (errno.EACCES, errno.ENOENT):
936 if inst.errno in (errno.EACCES, errno.ENOENT):
931 match.bad(self.pathto(nd),
937 match.bad(self.pathto(nd),
932 encoding.strtolocal(inst.strerror))
938 encoding.strtolocal(inst.strerror))
933 continue
939 continue
934 raise
940 raise
935 for f, kind, st in entries:
941 for f, kind, st in entries:
936 # Some matchers may return files in the visitentries set,
942 # Some matchers may return files in the visitentries set,
937 # instead of 'this', if the matcher explicitly mentions them
943 # instead of 'this', if the matcher explicitly mentions them
938 # and is not an exactmatcher. This is acceptable; we do not
944 # and is not an exactmatcher. This is acceptable; we do not
939 # make any hard assumptions about file-or-directory below
945 # make any hard assumptions about file-or-directory below
940 # based on the presence of `f` in visitentries. If
946 # based on the presence of `f` in visitentries. If
941 # visitchildrenset returned a set, we can always skip the
947 # visitchildrenset returned a set, we can always skip the
942 # entries *not* in the set it provided regardless of whether
948 # entries *not* in the set it provided regardless of whether
943 # they're actually a file or a directory.
949 # they're actually a file or a directory.
944 if visitentries and f not in visitentries:
950 if visitentries and f not in visitentries:
945 continue
951 continue
946 if normalizefile:
952 if normalizefile:
947 # even though f might be a directory, we're only
953 # even though f might be a directory, we're only
948 # interested in comparing it to files currently in the
954 # interested in comparing it to files currently in the
949 # dmap -- therefore normalizefile is enough
955 # dmap -- therefore normalizefile is enough
950 nf = normalizefile(nd and (nd + "/" + f) or f, True,
956 nf = normalizefile(nd and (nd + "/" + f) or f, True,
951 True)
957 True)
952 else:
958 else:
953 nf = nd and (nd + "/" + f) or f
959 nf = nd and (nd + "/" + f) or f
954 if nf not in results:
960 if nf not in results:
955 if kind == dirkind:
961 if kind == dirkind:
956 if not ignore(nf):
962 if not ignore(nf):
957 if matchtdir:
963 if matchtdir:
958 matchtdir(nf)
964 matchtdir(nf)
959 wadd(nf)
965 wadd(nf)
960 if nf in dmap and (matchalways or matchfn(nf)):
966 if nf in dmap and (matchalways or matchfn(nf)):
961 results[nf] = None
967 results[nf] = None
962 elif kind == regkind or kind == lnkkind:
968 elif kind == regkind or kind == lnkkind:
963 if nf in dmap:
969 if nf in dmap:
964 if matchalways or matchfn(nf):
970 if matchalways or matchfn(nf):
965 results[nf] = st
971 results[nf] = st
966 elif ((matchalways or matchfn(nf))
972 elif ((matchalways or matchfn(nf))
967 and not ignore(nf)):
973 and not ignore(nf)):
968 # unknown file -- normalize if necessary
974 # unknown file -- normalize if necessary
969 if not alreadynormed:
975 if not alreadynormed:
970 nf = normalize(nf, False, True)
976 nf = normalize(nf, False, True)
971 results[nf] = st
977 results[nf] = st
972 elif nf in dmap and (matchalways or matchfn(nf)):
978 elif nf in dmap and (matchalways or matchfn(nf)):
973 results[nf] = None
979 results[nf] = None
974
980
975 for nd, d in work:
981 for nd, d in work:
976 # alreadynormed means that processwork doesn't have to do any
982 # alreadynormed means that processwork doesn't have to do any
977 # expensive directory normalization
983 # expensive directory normalization
978 alreadynormed = not normalize or nd == d
984 alreadynormed = not normalize or nd == d
979 traverse([d], alreadynormed)
985 traverse([d], alreadynormed)
980
986
981 for s in subrepos:
987 for s in subrepos:
982 del results[s]
988 del results[s]
983 del results['.hg']
989 del results['.hg']
984
990
985 # step 3: visit remaining files from dmap
991 # step 3: visit remaining files from dmap
986 if not skipstep3 and not exact:
992 if not skipstep3 and not exact:
987 # If a dmap file is not in results yet, it was either
993 # If a dmap file is not in results yet, it was either
988 # a) not matching matchfn b) ignored, c) missing, or d) under a
994 # a) not matching matchfn b) ignored, c) missing, or d) under a
989 # symlink directory.
995 # symlink directory.
990 if not results and matchalways:
996 if not results and matchalways:
991 visit = [f for f in dmap]
997 visit = [f for f in dmap]
992 else:
998 else:
993 visit = [f for f in dmap if f not in results and matchfn(f)]
999 visit = [f for f in dmap if f not in results and matchfn(f)]
994 visit.sort()
1000 visit.sort()
995
1001
996 if unknown:
1002 if unknown:
997 # unknown == True means we walked all dirs under the roots
1003 # unknown == True means we walked all dirs under the roots
998 # that wasn't ignored, and everything that matched was stat'ed
1004 # that wasn't ignored, and everything that matched was stat'ed
999 # and is already in results.
1005 # and is already in results.
1000 # The rest must thus be ignored or under a symlink.
1006 # The rest must thus be ignored or under a symlink.
1001 audit_path = pathutil.pathauditor(self._root, cached=True)
1007 audit_path = pathutil.pathauditor(self._root, cached=True)
1002
1008
1003 for nf in iter(visit):
1009 for nf in iter(visit):
1004 # If a stat for the same file was already added with a
1010 # If a stat for the same file was already added with a
1005 # different case, don't add one for this, since that would
1011 # different case, don't add one for this, since that would
1006 # make it appear as if the file exists under both names
1012 # make it appear as if the file exists under both names
1007 # on disk.
1013 # on disk.
1008 if (normalizefile and
1014 if (normalizefile and
1009 normalizefile(nf, True, True) in results):
1015 normalizefile(nf, True, True) in results):
1010 results[nf] = None
1016 results[nf] = None
1011 # Report ignored items in the dmap as long as they are not
1017 # Report ignored items in the dmap as long as they are not
1012 # under a symlink directory.
1018 # under a symlink directory.
1013 elif audit_path.check(nf):
1019 elif audit_path.check(nf):
1014 try:
1020 try:
1015 results[nf] = lstat(join(nf))
1021 results[nf] = lstat(join(nf))
1016 # file was just ignored, no links, and exists
1022 # file was just ignored, no links, and exists
1017 except OSError:
1023 except OSError:
1018 # file doesn't exist
1024 # file doesn't exist
1019 results[nf] = None
1025 results[nf] = None
1020 else:
1026 else:
1021 # It's either missing or under a symlink directory
1027 # It's either missing or under a symlink directory
1022 # which we in this case report as missing
1028 # which we in this case report as missing
1023 results[nf] = None
1029 results[nf] = None
1024 else:
1030 else:
1025 # We may not have walked the full directory tree above,
1031 # We may not have walked the full directory tree above,
1026 # so stat and check everything we missed.
1032 # so stat and check everything we missed.
1027 iv = iter(visit)
1033 iv = iter(visit)
1028 for st in util.statfiles([join(i) for i in visit]):
1034 for st in util.statfiles([join(i) for i in visit]):
1029 results[next(iv)] = st
1035 results[next(iv)] = st
1030 return results
1036 return results
1031
1037
def status(self, match, subrepos, ignored, clean, unknown):
    '''Determine the status of the working copy relative to the
    dirstate and return a pair of (unsure, status), where status is of type
    scmutil.status and:

      unsure:
        files that might have been modified since the dirstate was
        written, but need to be read to be sure (size is the same
        but mtime differs)
      status.modified:
        files that have definitely been modified since the dirstate
        was written (different size or mode)
      status.clean:
        files that have definitely not been modified since the
        dirstate was written
    '''
    # Stash the caller's flags under distinct names: the parameter names
    # are reused immediately below for the result lists.
    listignored, listclean, listunknown = ignored, clean, unknown
    lookup, modified, added, unknown, ignored = [], [], [], [], []
    removed, deleted, clean = [], [], []

    dmap = self._map
    dmap.preload()
    # Bind hot methods/attributes to locals: the loop below runs once per
    # file returned by the working-directory walk.
    dcontains = dmap.__contains__
    dget = dmap.__getitem__
    ladd = lookup.append            # aka "unsure"
    madd = modified.append
    aadd = added.append
    uadd = unknown.append
    iadd = ignored.append
    radd = removed.append
    dadd = deleted.append
    cadd = clean.append
    mexact = match.exact
    dirignore = self._dirignore
    checkexec = self._checkexec
    copymap = self._map.copymap
    lastnormaltime = self._lastnormaltime

    # We need to do full walks when either
    # - we're listing all clean files, or
    # - match.traversedir does something, because match.traversedir should
    #   be called for every dir in the working dir
    full = listclean or match.traversedir is not None
    for fn, st in self.walk(match, subrepos, listunknown, listignored,
                            full=full).iteritems():
        if not dcontains(fn):
            # Present in the working directory but not in the dirstate:
            # the file is either ignored or unknown.
            if (listignored or mexact(fn)) and dirignore(fn):
                if listignored:
                    iadd(fn)
            else:
                uadd(fn)
            continue

        # This is equivalent to 'state, mode, size, time = dmap[fn]' but not
        # written like that for performance reasons. dmap[fn] is not a
        # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE
        # opcode has fast paths when the value to be unpacked is a tuple or
        # a list, but falls back to creating a full-fledged iterator in
        # general. That is much slower than simply accessing and storing the
        # tuple members one by one.
        t = dget(fn)
        state = t[0]
        mode = t[1]
        size = t[2]
        time = t[3]

        if not st and state in "nma":
            # Entry is tracked/merged/added but the walk produced no stat
            # result: the file has disappeared from disk.
            dadd(fn)
        elif state == 'n':
            # 'normal' entry: compare the recorded size/mode/mtime
            # (masked to the range the dirstate format can store) with
            # the on-disk stat data.
            if (size >= 0 and
                ((size != st.st_size and size != st.st_size & _rangemask)
                 or ((mode ^ st.st_mode) & 0o100 and checkexec))
                or size == -2 # other parent
                or fn in copymap):
                madd(fn)
            elif (time != st[stat.ST_MTIME]
                  and time != st[stat.ST_MTIME] & _rangemask):
                # Same size but mtime changed: must read contents to know.
                ladd(fn)
            elif st[stat.ST_MTIME] == lastnormaltime:
                # fn may have just been marked as normal and it may have
                # changed in the same second without changing its size.
                # This can happen if we quickly do multiple commits.
                # Force lookup, so we don't miss such a racy file change.
                ladd(fn)
            elif listclean:
                cadd(fn)
        elif state == 'm':
            madd(fn)
        elif state == 'a':
            aadd(fn)
        elif state == 'r':
            radd(fn)

    return (lookup, scmutil.status(modified, added, removed, deleted,
                                   unknown, ignored, clean))
1127
1133
def matches(self, match):
    '''Return all files in the dirstate (in whatever state) accepted by
    match.
    '''
    entries = self._map
    if match.always():
        # Everything matches; hand back the full key collection.
        return entries.keys()
    wanted = match.files()
    if match.isexact():
        # Exact matchers enumerate their files explicitly, and that list
        # is typically far smaller than the dirstate -- so filter the
        # pattern list against the map rather than the other way around.
        return [name for name in wanted if name in entries]
    if match.prefix() and all(name in entries for name in wanted):
        # Every pattern directly names a tracked file, so the pattern
        # list itself is already the answer.
        return list(wanted)
    return [name for name in entries if match(name)]
1145
1151
1146 def _actualfilename(self, tr):
1152 def _actualfilename(self, tr):
1147 if tr:
1153 if tr:
1148 return self._pendingfilename
1154 return self._pendingfilename
1149 else:
1155 else:
1150 return self._filename
1156 return self._filename
1151
1157
def savebackup(self, tr, backupname):
    '''Save current dirstate into backup file.

    tr is the current transaction (possibly None); backupname is the
    vfs-relative name of the backup file and must differ from the
    dirstate file itself.
    '''
    filename = self._actualfilename(tr)
    assert backupname != filename

    # use '_writedirstate' instead of 'write' to write changes certainly,
    # because the latter omits writing out if transaction is running.
    # output file will be used to create backup of dirstate at this point.
    if self._dirty or not self._opener.exists(filename):
        self._writedirstate(self._opener(filename, "w", atomictemp=True,
                                         checkambig=True))

    if tr:
        # ensure that subsequent tr.writepending returns True for
        # changes written out above, even if dirstate is never
        # changed after this
        tr.addfilegenerator('dirstate', (self._filename,),
                            self._writedirstate, location='plain')

        # ensure that pending file written above is unlinked at
        # failure, even if tr.writepending isn't invoked until the
        # end of this transaction
        tr.registertmp(filename, location='plain')

    # Drop any stale backup before creating the new one.
    self._opener.tryunlink(backupname)
    # hardlink backup is okay because _writedirstate is always called
    # with an "atomictemp=True" file.
    util.copyfile(self._opener.join(filename),
                  self._opener.join(backupname), hardlink=True)
1181
1187
def restorebackup(self, tr, backupname):
    '''Restore dirstate by backup file'''
    # this "invalidate()" prevents "wlock.release()" from writing
    # changes of dirstate out after restoring from backup file
    self.invalidate()
    filename = self._actualfilename(tr)
    o = self._opener
    if util.samefile(o.join(backupname), o.join(filename)):
        # The backup already IS the dirstate file (e.g. a hardlink made
        # by savebackup); just drop the redundant name.
        o.unlink(backupname)
    else:
        # checkambig=True guards against mtime ambiguity of the
        # restored file.
        o.rename(backupname, filename, checkambig=True)
1193
1199
def clearbackup(self, tr, backupname):
    '''Clear backup file'''
    # tr is accepted for interface symmetry with savebackup and
    # restorebackup; it is not consulted here.
    self._opener.unlink(backupname)
1197
1203
1198 class dirstatemap(object):
1204 class dirstatemap(object):
1199 """Map encapsulating the dirstate's contents.
1205 """Map encapsulating the dirstate's contents.
1200
1206
1201 The dirstate contains the following state:
1207 The dirstate contains the following state:
1202
1208
1203 - `identity` is the identity of the dirstate file, which can be used to
1209 - `identity` is the identity of the dirstate file, which can be used to
1204 detect when changes have occurred to the dirstate file.
1210 detect when changes have occurred to the dirstate file.
1205
1211
1206 - `parents` is a pair containing the parents of the working copy. The
1212 - `parents` is a pair containing the parents of the working copy. The
1207 parents are updated by calling `setparents`.
1213 parents are updated by calling `setparents`.
1208
1214
1209 - the state map maps filenames to tuples of (state, mode, size, mtime),
1215 - the state map maps filenames to tuples of (state, mode, size, mtime),
1210 where state is a single character representing 'normal', 'added',
1216 where state is a single character representing 'normal', 'added',
1211 'removed', or 'merged'. It is read by treating the dirstate as a
1217 'removed', or 'merged'. It is read by treating the dirstate as a
1212 dict. File state is updated by calling the `addfile`, `removefile` and
1218 dict. File state is updated by calling the `addfile`, `removefile` and
1213 `dropfile` methods.
1219 `dropfile` methods.
1214
1220
1215 - `copymap` maps destination filenames to their source filename.
1221 - `copymap` maps destination filenames to their source filename.
1216
1222
1217 The dirstate also provides the following views onto the state:
1223 The dirstate also provides the following views onto the state:
1218
1224
1219 - `nonnormalset` is a set of the filenames that have state other
1225 - `nonnormalset` is a set of the filenames that have state other
1220 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1226 than 'normal', or are normal but have an mtime of -1 ('normallookup').
1221
1227
1222 - `otherparentset` is a set of the filenames that are marked as coming
1228 - `otherparentset` is a set of the filenames that are marked as coming
1223 from the second parent when the dirstate is currently being merged.
1229 from the second parent when the dirstate is currently being merged.
1224
1230
1225 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1231 - `filefoldmap` is a dict mapping normalized filenames to the denormalized
1226 form that they appear as in the dirstate.
1232 form that they appear as in the dirstate.
1227
1233
1228 - `dirfoldmap` is a dict mapping normalized directory names to the
1234 - `dirfoldmap` is a dict mapping normalized directory names to the
1229 denormalized form that they appear as in the dirstate.
1235 denormalized form that they appear as in the dirstate.
1230 """
1236 """
1231
1237
def __init__(self, ui, opener, root):
    # ui is used for user interaction/reporting; opener is a vfs-like
    # callable used to read/write the dirstate file; root is the path of
    # the tracked working directory.
    self._ui = ui
    self._opener = opener
    self._root = root
    self._filename = 'dirstate'

    # Cached (p1, p2) pair; filled lazily by parents().
    self._parents = None
    # True once setparents() changed the parents in memory.
    self._dirtyparents = False

    # for consistent view between _pl() and _read() invocations
    self._pendingmode = None
1243
1249
@propertycache
def _map(self):
    """Load the dirstate contents on first access.

    self._map is assigned (shadowing this propertycache) *before*
    read() runs, so read() can populate the dict in place.
    """
    self._map = {}
    self.read()
    return self._map
1249
1255
@propertycache
def copymap(self):
    """Map of copy destination -> source, loaded with the dirstate."""
    self.copymap = {}
    # Touching self._map forces read(); NOTE(review): this relies on
    # read() filling the copymap assigned above as a side effect.
    self._map
    return self.copymap
1255
1261
def clear(self):
    """Empty the map: drop all entries, copy data, and cached views."""
    self._map.clear()
    self.copymap.clear()
    self.setparents(nullid, nullid)
    # Invalidate every lazily-computed view so it is rebuilt on demand.
    for cached in ("_dirs", "_alldirs", "filefoldmap", "dirfoldmap",
                   "nonnormalset", "otherparentset"):
        util.clearcachedproperty(self, cached)
1266
1272
def items(self):
    # Iterate (filename, entry) pairs; despite the py3-style name this
    # returns the py2 iterator from the underlying dict.
    return self._map.iteritems()

# forward for python2,3 compat
iteritems = items
1272
1278
def __len__(self):
    # Number of files recorded in the dirstate.
    return len(self._map)
1275
1281
def __iter__(self):
    # Iterate over the recorded filenames.
    return iter(self._map)
1278
1284
def get(self, key, default=None):
    # Entry for filename `key`, or `default` when absent.
    return self._map.get(key, default)
1281
1287
def __contains__(self, key):
    # True if filename `key` has an entry (any state) in the dirstate.
    return key in self._map
1284
1290
def __getitem__(self, key):
    # Entry for filename `key`; raises KeyError when absent.
    return self._map[key]
1287
1293
def keys(self):
    # All recorded filenames.
    return self._map.keys()
1290
1296
def preload(self):
    """Loads the underlying data, if it's not already loaded"""
    # Merely touching the propertycache triggers read() on first use.
    self._map
1294
1300
def addfile(self, f, oldstate, state, mode, size, mtime):
    """Add a tracked file to the dirstate."""
    # Keep the cached directory structures in sync -- but only when they
    # have actually been computed (present in __dict__).
    if oldstate in "?r" and r"_dirs" in self.__dict__:
        # f was untracked or removed, so it is new to the tracked dirs.
        self._dirs.addpath(f)
    if oldstate == "?" and r"_alldirs" in self.__dict__:
        # f had no entry at all, so it is new to the all-entries dirs.
        self._alldirs.addpath(f)
    self._map[f] = dirstatetuple(state, mode, size, mtime)
    # Maintain the derived sets (see the class docstring).
    if state != 'n' or mtime == -1:
        self.nonnormalset.add(f)
    if size == -2:
        # size == -2 marks an entry coming from the other merge parent.
        self.otherparentset.add(f)
1306
1312
def removefile(self, f, oldstate, size):
    """
    Mark a file as removed in the dirstate.

    The `size` parameter is used to store sentinel values that indicate
    the file's previous state. In the future, we should refactor this
    to be more explicit about what that state is.
    """
    if oldstate not in "?r" and r"_dirs" in self.__dict__:
        # f was tracked; 'r' entries do not count toward tracked dirs.
        self._dirs.delpath(f)
    if oldstate == "?" and r"_alldirs" in self.__dict__:
        # f gains an entry (state 'r'), so the all-entries dirs grow.
        self._alldirs.addpath(f)
    if r"filefoldmap" in self.__dict__:
        # Drop the case-folded alias of the no-longer-tracked name.
        normed = util.normcase(f)
        self.filefoldmap.pop(normed, None)
    self._map[f] = dirstatetuple('r', 0, size, 0)
    self.nonnormalset.add(f)
1324
1330
def dropfile(self, f, oldstate):
    """
    Remove a file from the dirstate.  Returns True if the file was
    previously recorded.
    """
    exists = self._map.pop(f, None) is not None
    if exists:
        # The entry really existed: keep the cached views consistent.
        if oldstate != "r" and r"_dirs" in self.__dict__:
            self._dirs.delpath(f)
        if r"_alldirs" in self.__dict__:
            self._alldirs.delpath(f)
        if r"filefoldmap" in self.__dict__:
            normed = util.normcase(f)
            self.filefoldmap.pop(normed, None)
    # Discard unconditionally; harmless when f was never nonnormal.
    self.nonnormalset.discard(f)
    return exists
1341
1347
def clearambiguoustimes(self, files, now):
    """Force later re-examination of entries whose mtime equals `now`.

    An mtime equal to the write time is ambiguous (the file could still
    change within the same second), so such 'n' entries get mtime -1
    and join nonnormalset.
    """
    for fname in files:
        entry = self.get(fname)
        if entry is None or entry[0] != 'n' or entry[3] != now:
            continue
        self._map[fname] = dirstatetuple(entry[0], entry[1], entry[2], -1)
        self.nonnormalset.add(fname)
1348
1354
def nonnormalentries(self):
    '''Compute the nonnormal dirstate entries from the dmap'''
    try:
        # Fast path: the C parsers module computes both sets at once.
        return parsers.nonnormalotherparententries(self._map)
    except AttributeError:
        # Pure-Python fallback: derive the two sets from the raw map.
        entries = self._map.iteritems
        nonnorm = set(fname for fname, e in entries()
                      if e[0] != 'n' or e[3] == -1)
        otherparent = set(fname for fname, e in entries()
                          if e[0] == 'n' and e[2] == -2)
        return nonnorm, otherparent
1362
1368
@propertycache
def filefoldmap(self):
    """Returns a dictionary mapping normalized case paths to their
    non-normalized versions.
    """
    try:
        # Prefer the C implementation when parsers provides it.
        makefilefoldmap = parsers.make_file_foldmap
    except AttributeError:
        pass
    else:
        return makefilefoldmap(self._map, util.normcasespec,
                               util.normcasefallback)

    # Pure-Python fallback; removed ('r') entries are skipped.
    f = {}
    normcase = util.normcase
    for name, s in self._map.iteritems():
        if s[0] != 'r':
            f[normcase(name)] = name
    f['.'] = '.' # prevents useless util.fspath() invocation
    return f
1383
1389
def hastrackeddir(self, d):
    """
    Returns True if the dirstate contains a tracked (not removed) file
    in this directory.
    """
    # _dirs is built from the map with 'r' entries skipped.
    return d in self._dirs
1390
1396
def hasdir(self, d):
    """
    Returns True if the dirstate contains a file (tracked or removed)
    in this directory.
    """
    # _alldirs is built from every entry, removed ones included.
    return d in self._alldirs
1397
1403
@propertycache
def _dirs(self):
    # Directory structure of tracked files only: entries whose state is
    # 'r' (removed) are skipped.
    return util.dirs(self._map, 'r')
1401
1407
@propertycache
def _alldirs(self):
    # Directory structure of every entry, removed ones included.
    return util.dirs(self._map)
1405
1411
def _opendirstatefile(self):
    """Open the dirstate (or its pending variant) for reading.

    The mode (pending vs. regular) seen by the first open is remembered;
    a later open that would observe the other mode aborts, so every read
    within one operation sees a consistent file.
    """
    fp, mode = txnutil.trypending(self._root, self._opener, self._filename)
    if self._pendingmode not in (None, mode):
        fp.close()
        raise error.Abort(_('working directory state may be '
                            'changed parallelly'))
    self._pendingmode = mode
    return fp
1414
1420
def parents(self):
    # Lazily read and cache the working copy's two parents from the
    # first 40 bytes of the dirstate file (two 20-byte binary hashes).
    if not self._parents:
        try:
            fp = self._opendirstatefile()
            st = fp.read(40)
            fp.close()
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            # File doesn't exist, so the current state is empty
            st = ''

        l = len(st)
        if l == 40:
            self._parents = (st[:20], st[20:40])
        elif l == 0:
            self._parents = (nullid, nullid)
        else:
            # A short read means a truncated or corrupt dirstate file.
            raise error.Abort(_('working directory state appears '
                                'damaged!'))

    return self._parents
1437
1443
1438 def setparents(self, p1, p2):
1444 def setparents(self, p1, p2):
1439 self._parents = (p1, p2)
1445 self._parents = (p1, p2)
1440 self._dirtyparents = True
1446 self._dirtyparents = True
1441
1447
1442 def read(self):
1448 def read(self):
1443 # ignore HG_PENDING because identity is used only for writing
1449 # ignore HG_PENDING because identity is used only for writing
1444 self.identity = util.filestat.frompath(
1450 self.identity = util.filestat.frompath(
1445 self._opener.join(self._filename))
1451 self._opener.join(self._filename))
1446
1452
1447 try:
1453 try:
1448 fp = self._opendirstatefile()
1454 fp = self._opendirstatefile()
1449 try:
1455 try:
1450 st = fp.read()
1456 st = fp.read()
1451 finally:
1457 finally:
1452 fp.close()
1458 fp.close()
1453 except IOError as err:
1459 except IOError as err:
1454 if err.errno != errno.ENOENT:
1460 if err.errno != errno.ENOENT:
1455 raise
1461 raise
1456 return
1462 return
1457 if not st:
1463 if not st:
1458 return
1464 return
1459
1465
1460 if util.safehasattr(parsers, 'dict_new_presized'):
1466 if util.safehasattr(parsers, 'dict_new_presized'):
1461 # Make an estimate of the number of files in the dirstate based on
1467 # Make an estimate of the number of files in the dirstate based on
1462 # its size. From a linear regression on a set of real-world repos,
1468 # its size. From a linear regression on a set of real-world repos,
1463 # all over 10,000 files, the size of a dirstate entry is 85
1469 # all over 10,000 files, the size of a dirstate entry is 85
1464 # bytes. The cost of resizing is significantly higher than the cost
1470 # bytes. The cost of resizing is significantly higher than the cost
1465 # of filling in a larger presized dict, so subtract 20% from the
1471 # of filling in a larger presized dict, so subtract 20% from the
1466 # size.
1472 # size.
1467 #
1473 #
1468 # This heuristic is imperfect in many ways, so in a future dirstate
1474 # This heuristic is imperfect in many ways, so in a future dirstate
1469 # format update it makes sense to just record the number of entries
1475 # format update it makes sense to just record the number of entries
1470 # on write.
1476 # on write.
1471 self._map = parsers.dict_new_presized(len(st) // 71)
1477 self._map = parsers.dict_new_presized(len(st) // 71)
1472
1478
1473 # Python's garbage collector triggers a GC each time a certain number
1479 # Python's garbage collector triggers a GC each time a certain number
1474 # of container objects (the number being defined by
1480 # of container objects (the number being defined by
1475 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1481 # gc.get_threshold()) are allocated. parse_dirstate creates a tuple
1476 # for each file in the dirstate. The C version then immediately marks
1482 # for each file in the dirstate. The C version then immediately marks
1477 # them as not to be tracked by the collector. However, this has no
1483 # them as not to be tracked by the collector. However, this has no
1478 # effect on when GCs are triggered, only on what objects the GC looks
1484 # effect on when GCs are triggered, only on what objects the GC looks
1479 # into. This means that O(number of files) GCs are unavoidable.
1485 # into. This means that O(number of files) GCs are unavoidable.
1480 # Depending on when in the process's lifetime the dirstate is parsed,
1486 # Depending on when in the process's lifetime the dirstate is parsed,
1481 # this can get very expensive. As a workaround, disable GC while
1487 # this can get very expensive. As a workaround, disable GC while
1482 # parsing the dirstate.
1488 # parsing the dirstate.
1483 #
1489 #
1484 # (we cannot decorate the function directly since it is in a C module)
1490 # (we cannot decorate the function directly since it is in a C module)
1485 parse_dirstate = util.nogc(parsers.parse_dirstate)
1491 parse_dirstate = util.nogc(parsers.parse_dirstate)
1486 p = parse_dirstate(self._map, self.copymap, st)
1492 p = parse_dirstate(self._map, self.copymap, st)
1487 if not self._dirtyparents:
1493 if not self._dirtyparents:
1488 self.setparents(*p)
1494 self.setparents(*p)
1489
1495
1490 # Avoid excess attribute lookups by fast pathing certain checks
1496 # Avoid excess attribute lookups by fast pathing certain checks
1491 self.__contains__ = self._map.__contains__
1497 self.__contains__ = self._map.__contains__
1492 self.__getitem__ = self._map.__getitem__
1498 self.__getitem__ = self._map.__getitem__
1493 self.get = self._map.get
1499 self.get = self._map.get
1494
1500
1495 def write(self, st, now):
1501 def write(self, st, now):
1496 st.write(parsers.pack_dirstate(self._map, self.copymap,
1502 st.write(parsers.pack_dirstate(self._map, self.copymap,
1497 self.parents(), now))
1503 self.parents(), now))
1498 st.close()
1504 st.close()
1499 self._dirtyparents = False
1505 self._dirtyparents = False
1500 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1506 self.nonnormalset, self.otherparentset = self.nonnormalentries()
1501
1507
1502 @propertycache
1508 @propertycache
1503 def nonnormalset(self):
1509 def nonnormalset(self):
1504 nonnorm, otherparents = self.nonnormalentries()
1510 nonnorm, otherparents = self.nonnormalentries()
1505 self.otherparentset = otherparents
1511 self.otherparentset = otherparents
1506 return nonnorm
1512 return nonnorm
1507
1513
1508 @propertycache
1514 @propertycache
1509 def otherparentset(self):
1515 def otherparentset(self):
1510 nonnorm, otherparents = self.nonnormalentries()
1516 nonnorm, otherparents = self.nonnormalentries()
1511 self.nonnormalset = nonnorm
1517 self.nonnormalset = nonnorm
1512 return otherparents
1518 return otherparents
1513
1519
1514 @propertycache
1520 @propertycache
1515 def identity(self):
1521 def identity(self):
1516 self._map
1522 self._map
1517 return self.identity
1523 return self.identity
1518
1524
1519 @propertycache
1525 @propertycache
1520 def dirfoldmap(self):
1526 def dirfoldmap(self):
1521 f = {}
1527 f = {}
1522 normcase = util.normcase
1528 normcase = util.normcase
1523 for name in self._dirs:
1529 for name in self._dirs:
1524 f[normcase(name)] = name
1530 f[normcase(name)] = name
1525 return f
1531 return f
1526
1532
1527
1533
1528 if rustmod is not None:
1534 if rustmod is not None:
1529 class dirstatemap(object):
1535 class dirstatemap(object):
1530 def __init__(self, ui, opener, root):
1536 def __init__(self, ui, opener, root):
1531 self._ui = ui
1537 self._ui = ui
1532 self._opener = opener
1538 self._opener = opener
1533 self._root = root
1539 self._root = root
1534 self._filename = 'dirstate'
1540 self._filename = 'dirstate'
1535 self._parents = None
1541 self._parents = None
1536 self._dirtyparents = False
1542 self._dirtyparents = False
1537
1543
1538 # for consistent view between _pl() and _read() invocations
1544 # for consistent view between _pl() and _read() invocations
1539 self._pendingmode = None
1545 self._pendingmode = None
1540
1546
1541 def addfile(self, *args, **kwargs):
1547 def addfile(self, *args, **kwargs):
1542 return self._rustmap.addfile(*args, **kwargs)
1548 return self._rustmap.addfile(*args, **kwargs)
1543
1549
1544 def removefile(self, *args, **kwargs):
1550 def removefile(self, *args, **kwargs):
1545 return self._rustmap.removefile(*args, **kwargs)
1551 return self._rustmap.removefile(*args, **kwargs)
1546
1552
1547 def dropfile(self, *args, **kwargs):
1553 def dropfile(self, *args, **kwargs):
1548 return self._rustmap.dropfile(*args, **kwargs)
1554 return self._rustmap.dropfile(*args, **kwargs)
1549
1555
1550 def clearambiguoustimes(self, *args, **kwargs):
1556 def clearambiguoustimes(self, *args, **kwargs):
1551 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1557 return self._rustmap.clearambiguoustimes(*args, **kwargs)
1552
1558
1553 def nonnormalentries(self):
1559 def nonnormalentries(self):
1554 return self._rustmap.nonnormalentries()
1560 return self._rustmap.nonnormalentries()
1555
1561
1556 def get(self, *args, **kwargs):
1562 def get(self, *args, **kwargs):
1557 return self._rustmap.get(*args, **kwargs)
1563 return self._rustmap.get(*args, **kwargs)
1558
1564
1559 @propertycache
1565 @propertycache
1560 def _rustmap(self):
1566 def _rustmap(self):
1561 self._rustmap = rustmod.DirstateMap(self._root)
1567 self._rustmap = rustmod.DirstateMap(self._root)
1562 self.read()
1568 self.read()
1563 return self._rustmap
1569 return self._rustmap
1564
1570
1565 @property
1571 @property
1566 def copymap(self):
1572 def copymap(self):
1567 return self._rustmap.copymap()
1573 return self._rustmap.copymap()
1568
1574
1569 def preload(self):
1575 def preload(self):
1570 self._rustmap
1576 self._rustmap
1571
1577
1572 def clear(self):
1578 def clear(self):
1573 self._rustmap.clear()
1579 self._rustmap.clear()
1574 self.setparents(nullid, nullid)
1580 self.setparents(nullid, nullid)
1575 util.clearcachedproperty(self, "_dirs")
1581 util.clearcachedproperty(self, "_dirs")
1576 util.clearcachedproperty(self, "_alldirs")
1582 util.clearcachedproperty(self, "_alldirs")
1577 util.clearcachedproperty(self, "dirfoldmap")
1583 util.clearcachedproperty(self, "dirfoldmap")
1578
1584
1579 def items(self):
1585 def items(self):
1580 return self._rustmap.items()
1586 return self._rustmap.items()
1581
1587
1582 def keys(self):
1588 def keys(self):
1583 return iter(self._rustmap)
1589 return iter(self._rustmap)
1584
1590
1585 def __contains__(self, key):
1591 def __contains__(self, key):
1586 return key in self._rustmap
1592 return key in self._rustmap
1587
1593
1588 def __getitem__(self, item):
1594 def __getitem__(self, item):
1589 return self._rustmap[item]
1595 return self._rustmap[item]
1590
1596
1591 def __len__(self):
1597 def __len__(self):
1592 return len(self._rustmap)
1598 return len(self._rustmap)
1593
1599
1594 def __iter__(self):
1600 def __iter__(self):
1595 return iter(self._rustmap)
1601 return iter(self._rustmap)
1596
1602
1597 # forward for python2,3 compat
1603 # forward for python2,3 compat
1598 iteritems = items
1604 iteritems = items
1599
1605
1600 def _opendirstatefile(self):
1606 def _opendirstatefile(self):
1601 fp, mode = txnutil.trypending(self._root, self._opener,
1607 fp, mode = txnutil.trypending(self._root, self._opener,
1602 self._filename)
1608 self._filename)
1603 if self._pendingmode is not None and self._pendingmode != mode:
1609 if self._pendingmode is not None and self._pendingmode != mode:
1604 fp.close()
1610 fp.close()
1605 raise error.Abort(_('working directory state may be '
1611 raise error.Abort(_('working directory state may be '
1606 'changed parallelly'))
1612 'changed parallelly'))
1607 self._pendingmode = mode
1613 self._pendingmode = mode
1608 return fp
1614 return fp
1609
1615
1610 def setparents(self, p1, p2):
1616 def setparents(self, p1, p2):
1611 self._rustmap.setparents(p1, p2)
1617 self._rustmap.setparents(p1, p2)
1612 self._parents = (p1, p2)
1618 self._parents = (p1, p2)
1613 self._dirtyparents = True
1619 self._dirtyparents = True
1614
1620
1615 def parents(self):
1621 def parents(self):
1616 if not self._parents:
1622 if not self._parents:
1617 try:
1623 try:
1618 fp = self._opendirstatefile()
1624 fp = self._opendirstatefile()
1619 st = fp.read(40)
1625 st = fp.read(40)
1620 fp.close()
1626 fp.close()
1621 except IOError as err:
1627 except IOError as err:
1622 if err.errno != errno.ENOENT:
1628 if err.errno != errno.ENOENT:
1623 raise
1629 raise
1624 # File doesn't exist, so the current state is empty
1630 # File doesn't exist, so the current state is empty
1625 st = ''
1631 st = ''
1626
1632
1627 try:
1633 try:
1628 self._parents = self._rustmap.parents(st)
1634 self._parents = self._rustmap.parents(st)
1629 except ValueError:
1635 except ValueError:
1630 raise error.Abort(_('working directory state appears '
1636 raise error.Abort(_('working directory state appears '
1631 'damaged!'))
1637 'damaged!'))
1632
1638
1633 return self._parents
1639 return self._parents
1634
1640
1635 def read(self):
1641 def read(self):
1636 # ignore HG_PENDING because identity is used only for writing
1642 # ignore HG_PENDING because identity is used only for writing
1637 self.identity = util.filestat.frompath(
1643 self.identity = util.filestat.frompath(
1638 self._opener.join(self._filename))
1644 self._opener.join(self._filename))
1639
1645
1640 try:
1646 try:
1641 fp = self._opendirstatefile()
1647 fp = self._opendirstatefile()
1642 try:
1648 try:
1643 st = fp.read()
1649 st = fp.read()
1644 finally:
1650 finally:
1645 fp.close()
1651 fp.close()
1646 except IOError as err:
1652 except IOError as err:
1647 if err.errno != errno.ENOENT:
1653 if err.errno != errno.ENOENT:
1648 raise
1654 raise
1649 return
1655 return
1650 if not st:
1656 if not st:
1651 return
1657 return
1652
1658
1653 parse_dirstate = util.nogc(self._rustmap.read)
1659 parse_dirstate = util.nogc(self._rustmap.read)
1654 parents = parse_dirstate(st)
1660 parents = parse_dirstate(st)
1655 if parents and not self._dirtyparents:
1661 if parents and not self._dirtyparents:
1656 self.setparents(*parents)
1662 self.setparents(*parents)
1657
1663
1658 def write(self, st, now):
1664 def write(self, st, now):
1659 parents = self.parents()
1665 parents = self.parents()
1660 st.write(self._rustmap.write(parents[0], parents[1], now))
1666 st.write(self._rustmap.write(parents[0], parents[1], now))
1661 st.close()
1667 st.close()
1662 self._dirtyparents = False
1668 self._dirtyparents = False
1663
1669
1664 @propertycache
1670 @propertycache
1665 def filefoldmap(self):
1671 def filefoldmap(self):
1666 """Returns a dictionary mapping normalized case paths to their
1672 """Returns a dictionary mapping normalized case paths to their
1667 non-normalized versions.
1673 non-normalized versions.
1668 """
1674 """
1669 return self._rustmap.filefoldmapasdict()
1675 return self._rustmap.filefoldmapasdict()
1670
1676
1671 def hastrackeddir(self, d):
1677 def hastrackeddir(self, d):
1672 self._dirs # Trigger Python's propertycache
1678 self._dirs # Trigger Python's propertycache
1673 return self._rustmap.hastrackeddir(d)
1679 return self._rustmap.hastrackeddir(d)
1674
1680
1675 def hasdir(self, d):
1681 def hasdir(self, d):
1676 self._dirs # Trigger Python's propertycache
1682 self._dirs # Trigger Python's propertycache
1677 return self._rustmap.hasdir(d)
1683 return self._rustmap.hasdir(d)
1678
1684
1679 @propertycache
1685 @propertycache
1680 def _dirs(self):
1686 def _dirs(self):
1681 return self._rustmap.getdirs()
1687 return self._rustmap.getdirs()
1682
1688
1683 @propertycache
1689 @propertycache
1684 def _alldirs(self):
1690 def _alldirs(self):
1685 return self._rustmap.getalldirs()
1691 return self._rustmap.getalldirs()
1686
1692
1687 @propertycache
1693 @propertycache
1688 def identity(self):
1694 def identity(self):
1689 self._rustmap
1695 self._rustmap
1690 return self.identity
1696 return self.identity
1691
1697
1692 @property
1698 @property
1693 def nonnormalset(self):
1699 def nonnormalset(self):
1694 nonnorm, otherparents = self._rustmap.nonnormalentries()
1700 nonnorm, otherparents = self._rustmap.nonnormalentries()
1695 return nonnorm
1701 return nonnorm
1696
1702
1697 @property
1703 @property
1698 def otherparentset(self):
1704 def otherparentset(self):
1699 nonnorm, otherparents = self._rustmap.nonnormalentries()
1705 nonnorm, otherparents = self._rustmap.nonnormalentries()
1700 return otherparents
1706 return otherparents
1701
1707
1702 @propertycache
1708 @propertycache
1703 def dirfoldmap(self):
1709 def dirfoldmap(self):
1704 f = {}
1710 f = {}
1705 normcase = util.normcase
1711 normcase = util.normcase
1706 for name in self._dirs:
1712 for name in self._dirs:
1707 f[normcase(name)] = name
1713 f[normcase(name)] = name
1708 return f
1714 return f
@@ -1,235 +1,239 b''
1 # Test that certain objects conform to well-defined interfaces.
1 # Test that certain objects conform to well-defined interfaces.
2
2
3 from __future__ import absolute_import, print_function
3 from __future__ import absolute_import, print_function
4
4
5 from mercurial import encoding
5 from mercurial import encoding
6 encoding.environ[b'HGREALINTERFACES'] = b'1'
6 encoding.environ[b'HGREALINTERFACES'] = b'1'
7
7
8 import os
8 import os
9 import subprocess
9 import subprocess
10 import sys
10 import sys
11
11
12 # Only run if tests are run in a repo
12 # Only run if tests are run in a repo
13 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
13 if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'],
14 'test-repo']):
14 'test-repo']):
15 sys.exit(80)
15 sys.exit(80)
16
16
17 from mercurial.interfaces import (
17 from mercurial.interfaces import (
18 dirstate as intdirstate,
18 repository,
19 repository,
19 )
20 )
20 from mercurial.thirdparty.zope import (
21 from mercurial.thirdparty.zope import (
21 interface as zi,
22 interface as zi,
22 )
23 )
23 from mercurial.thirdparty.zope.interface import (
24 from mercurial.thirdparty.zope.interface import (
24 verify as ziverify,
25 verify as ziverify,
25 )
26 )
26 from mercurial import (
27 from mercurial import (
27 bundlerepo,
28 bundlerepo,
29 dirstate,
28 filelog,
30 filelog,
29 httppeer,
31 httppeer,
30 localrepo,
32 localrepo,
31 manifest,
33 manifest,
32 pycompat,
34 pycompat,
33 revlog,
35 revlog,
34 sshpeer,
36 sshpeer,
35 statichttprepo,
37 statichttprepo,
36 ui as uimod,
38 ui as uimod,
37 unionrepo,
39 unionrepo,
38 vfs as vfsmod,
40 vfs as vfsmod,
39 wireprotoserver,
41 wireprotoserver,
40 wireprototypes,
42 wireprototypes,
41 wireprotov1peer,
43 wireprotov1peer,
42 wireprotov2server,
44 wireprotov2server,
43 )
45 )
44
46
45 testdir = os.path.dirname(__file__)
47 testdir = os.path.dirname(__file__)
46 rootdir = pycompat.fsencode(os.path.normpath(os.path.join(testdir, '..')))
48 rootdir = pycompat.fsencode(os.path.normpath(os.path.join(testdir, '..')))
47
49
48 sys.path[0:0] = [testdir]
50 sys.path[0:0] = [testdir]
49 import simplestorerepo
51 import simplestorerepo
50 del sys.path[0]
52 del sys.path[0]
51
53
52 def checkzobject(o, allowextra=False):
54 def checkzobject(o, allowextra=False):
53 """Verify an object with a zope interface."""
55 """Verify an object with a zope interface."""
54 ifaces = zi.providedBy(o)
56 ifaces = zi.providedBy(o)
55 if not ifaces:
57 if not ifaces:
56 print('%r does not provide any zope interfaces' % o)
58 print('%r does not provide any zope interfaces' % o)
57 return
59 return
58
60
59 # Run zope.interface's built-in verification routine. This verifies that
61 # Run zope.interface's built-in verification routine. This verifies that
60 # everything that is supposed to be present is present.
62 # everything that is supposed to be present is present.
61 for iface in ifaces:
63 for iface in ifaces:
62 ziverify.verifyObject(iface, o)
64 ziverify.verifyObject(iface, o)
63
65
64 if allowextra:
66 if allowextra:
65 return
67 return
66
68
67 # Now verify that the object provides no extra public attributes that
69 # Now verify that the object provides no extra public attributes that
68 # aren't declared as part of interfaces.
70 # aren't declared as part of interfaces.
69 allowed = set()
71 allowed = set()
70 for iface in ifaces:
72 for iface in ifaces:
71 allowed |= set(iface.names(all=True))
73 allowed |= set(iface.names(all=True))
72
74
73 public = {a for a in dir(o) if not a.startswith('_')}
75 public = {a for a in dir(o) if not a.startswith('_')}
74
76
75 for attr in sorted(public - allowed):
77 for attr in sorted(public - allowed):
76 print('public attribute not declared in interfaces: %s.%s' % (
78 print('public attribute not declared in interfaces: %s.%s' % (
77 o.__class__.__name__, attr))
79 o.__class__.__name__, attr))
78
80
79 # Facilitates testing localpeer.
81 # Facilitates testing localpeer.
80 class dummyrepo(object):
82 class dummyrepo(object):
81 def __init__(self):
83 def __init__(self):
82 self.ui = uimod.ui()
84 self.ui = uimod.ui()
83 def filtered(self, name):
85 def filtered(self, name):
84 pass
86 pass
85 def _restrictcapabilities(self, caps):
87 def _restrictcapabilities(self, caps):
86 pass
88 pass
87
89
88 class dummyopener(object):
90 class dummyopener(object):
89 handlers = []
91 handlers = []
90
92
91 # Facilitates testing sshpeer without requiring a server.
93 # Facilitates testing sshpeer without requiring a server.
92 class badpeer(httppeer.httppeer):
94 class badpeer(httppeer.httppeer):
93 def __init__(self):
95 def __init__(self):
94 super(badpeer, self).__init__(None, None, None, dummyopener(), None,
96 super(badpeer, self).__init__(None, None, None, dummyopener(), None,
95 None)
97 None)
96 self.badattribute = True
98 self.badattribute = True
97
99
98 def badmethod(self):
100 def badmethod(self):
99 pass
101 pass
100
102
101 class dummypipe(object):
103 class dummypipe(object):
102 def close(self):
104 def close(self):
103 pass
105 pass
104
106
105 def main():
107 def main():
106 ui = uimod.ui()
108 ui = uimod.ui()
107 # Needed so we can open a local repo with obsstore without a warning.
109 # Needed so we can open a local repo with obsstore without a warning.
108 ui.setconfig(b'experimental', b'evolution.createmarkers', True)
110 ui.setconfig(b'experimental', b'evolution.createmarkers', True)
109
111
110 checkzobject(badpeer())
112 checkzobject(badpeer())
111
113
112 ziverify.verifyClass(repository.ipeerbase, httppeer.httppeer)
114 ziverify.verifyClass(repository.ipeerbase, httppeer.httppeer)
113 checkzobject(httppeer.httppeer(None, None, None, dummyopener(), None, None))
115 checkzobject(httppeer.httppeer(None, None, None, dummyopener(), None, None))
114
116
115 ziverify.verifyClass(repository.ipeerv2, httppeer.httpv2peer)
117 ziverify.verifyClass(repository.ipeerv2, httppeer.httpv2peer)
116 checkzobject(httppeer.httpv2peer(None, b'', b'', None, None, None))
118 checkzobject(httppeer.httpv2peer(None, b'', b'', None, None, None))
117
119
118 ziverify.verifyClass(repository.ipeerbase,
120 ziverify.verifyClass(repository.ipeerbase,
119 localrepo.localpeer)
121 localrepo.localpeer)
120 checkzobject(localrepo.localpeer(dummyrepo()))
122 checkzobject(localrepo.localpeer(dummyrepo()))
121
123
122 ziverify.verifyClass(repository.ipeercommandexecutor,
124 ziverify.verifyClass(repository.ipeercommandexecutor,
123 localrepo.localcommandexecutor)
125 localrepo.localcommandexecutor)
124 checkzobject(localrepo.localcommandexecutor(None))
126 checkzobject(localrepo.localcommandexecutor(None))
125
127
126 ziverify.verifyClass(repository.ipeercommandexecutor,
128 ziverify.verifyClass(repository.ipeercommandexecutor,
127 wireprotov1peer.peerexecutor)
129 wireprotov1peer.peerexecutor)
128 checkzobject(wireprotov1peer.peerexecutor(None))
130 checkzobject(wireprotov1peer.peerexecutor(None))
129
131
130 ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv1peer)
132 ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv1peer)
131 checkzobject(sshpeer.sshv1peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
133 checkzobject(sshpeer.sshv1peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
132 dummypipe(), None, None))
134 dummypipe(), None, None))
133
135
134 ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv2peer)
136 ziverify.verifyClass(repository.ipeerbase, sshpeer.sshv2peer)
135 checkzobject(sshpeer.sshv2peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
137 checkzobject(sshpeer.sshv2peer(ui, b'ssh://localhost/foo', b'', dummypipe(),
136 dummypipe(), None, None))
138 dummypipe(), None, None))
137
139
138 ziverify.verifyClass(repository.ipeerbase, bundlerepo.bundlepeer)
140 ziverify.verifyClass(repository.ipeerbase, bundlerepo.bundlepeer)
139 checkzobject(bundlerepo.bundlepeer(dummyrepo()))
141 checkzobject(bundlerepo.bundlepeer(dummyrepo()))
140
142
141 ziverify.verifyClass(repository.ipeerbase, statichttprepo.statichttppeer)
143 ziverify.verifyClass(repository.ipeerbase, statichttprepo.statichttppeer)
142 checkzobject(statichttprepo.statichttppeer(dummyrepo()))
144 checkzobject(statichttprepo.statichttppeer(dummyrepo()))
143
145
144 ziverify.verifyClass(repository.ipeerbase, unionrepo.unionpeer)
146 ziverify.verifyClass(repository.ipeerbase, unionrepo.unionpeer)
145 checkzobject(unionrepo.unionpeer(dummyrepo()))
147 checkzobject(unionrepo.unionpeer(dummyrepo()))
146
148
147 ziverify.verifyClass(repository.ilocalrepositorymain,
149 ziverify.verifyClass(repository.ilocalrepositorymain,
148 localrepo.localrepository)
150 localrepo.localrepository)
149 ziverify.verifyClass(repository.ilocalrepositoryfilestorage,
151 ziverify.verifyClass(repository.ilocalrepositoryfilestorage,
150 localrepo.revlogfilestorage)
152 localrepo.revlogfilestorage)
151 repo = localrepo.makelocalrepository(ui, rootdir)
153 repo = localrepo.makelocalrepository(ui, rootdir)
152 checkzobject(repo)
154 checkzobject(repo)
153
155
154 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
156 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
155 wireprotoserver.sshv1protocolhandler)
157 wireprotoserver.sshv1protocolhandler)
156 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
158 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
157 wireprotoserver.sshv2protocolhandler)
159 wireprotoserver.sshv2protocolhandler)
158 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
160 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
159 wireprotoserver.httpv1protocolhandler)
161 wireprotoserver.httpv1protocolhandler)
160 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
162 ziverify.verifyClass(wireprototypes.baseprotocolhandler,
161 wireprotov2server.httpv2protocolhandler)
163 wireprotov2server.httpv2protocolhandler)
162
164
163 sshv1 = wireprotoserver.sshv1protocolhandler(None, None, None)
165 sshv1 = wireprotoserver.sshv1protocolhandler(None, None, None)
164 checkzobject(sshv1)
166 checkzobject(sshv1)
165 sshv2 = wireprotoserver.sshv2protocolhandler(None, None, None)
167 sshv2 = wireprotoserver.sshv2protocolhandler(None, None, None)
166 checkzobject(sshv2)
168 checkzobject(sshv2)
167
169
168 httpv1 = wireprotoserver.httpv1protocolhandler(None, None, None)
170 httpv1 = wireprotoserver.httpv1protocolhandler(None, None, None)
169 checkzobject(httpv1)
171 checkzobject(httpv1)
170 httpv2 = wireprotov2server.httpv2protocolhandler(None, None)
172 httpv2 = wireprotov2server.httpv2protocolhandler(None, None)
171 checkzobject(httpv2)
173 checkzobject(httpv2)
172
174
173 ziverify.verifyClass(repository.ifilestorage, filelog.filelog)
175 ziverify.verifyClass(repository.ifilestorage, filelog.filelog)
174 ziverify.verifyClass(repository.imanifestdict, manifest.manifestdict)
176 ziverify.verifyClass(repository.imanifestdict, manifest.manifestdict)
175 ziverify.verifyClass(repository.imanifestrevisionstored,
177 ziverify.verifyClass(repository.imanifestrevisionstored,
176 manifest.manifestctx)
178 manifest.manifestctx)
177 ziverify.verifyClass(repository.imanifestrevisionwritable,
179 ziverify.verifyClass(repository.imanifestrevisionwritable,
178 manifest.memmanifestctx)
180 manifest.memmanifestctx)
179 ziverify.verifyClass(repository.imanifestrevisionstored,
181 ziverify.verifyClass(repository.imanifestrevisionstored,
180 manifest.treemanifestctx)
182 manifest.treemanifestctx)
181 ziverify.verifyClass(repository.imanifestrevisionwritable,
183 ziverify.verifyClass(repository.imanifestrevisionwritable,
182 manifest.memtreemanifestctx)
184 manifest.memtreemanifestctx)
183 ziverify.verifyClass(repository.imanifestlog, manifest.manifestlog)
185 ziverify.verifyClass(repository.imanifestlog, manifest.manifestlog)
184 ziverify.verifyClass(repository.imanifeststorage, manifest.manifestrevlog)
186 ziverify.verifyClass(repository.imanifeststorage, manifest.manifestrevlog)
185
187
186 ziverify.verifyClass(repository.irevisiondelta,
188 ziverify.verifyClass(repository.irevisiondelta,
187 simplestorerepo.simplestorerevisiondelta)
189 simplestorerepo.simplestorerevisiondelta)
188 ziverify.verifyClass(repository.ifilestorage, simplestorerepo.filestorage)
190 ziverify.verifyClass(repository.ifilestorage, simplestorerepo.filestorage)
189 ziverify.verifyClass(repository.iverifyproblem,
191 ziverify.verifyClass(repository.iverifyproblem,
190 simplestorerepo.simplefilestoreproblem)
192 simplestorerepo.simplefilestoreproblem)
191
193
194 ziverify.verifyClass(intdirstate.idirstate, dirstate.dirstate)
195
192 vfs = vfsmod.vfs(b'.')
196 vfs = vfsmod.vfs(b'.')
193 fl = filelog.filelog(vfs, b'dummy.i')
197 fl = filelog.filelog(vfs, b'dummy.i')
194 checkzobject(fl, allowextra=True)
198 checkzobject(fl, allowextra=True)
195
199
196 # Conforms to imanifestlog.
200 # Conforms to imanifestlog.
197 ml = manifest.manifestlog(vfs, repo, manifest.manifestrevlog(repo.svfs),
201 ml = manifest.manifestlog(vfs, repo, manifest.manifestrevlog(repo.svfs),
198 repo.narrowmatch())
202 repo.narrowmatch())
199 checkzobject(ml)
203 checkzobject(ml)
200 checkzobject(repo.manifestlog)
204 checkzobject(repo.manifestlog)
201
205
202 # Conforms to imanifestrevision.
206 # Conforms to imanifestrevision.
203 mctx = ml[repo[0].manifestnode()]
207 mctx = ml[repo[0].manifestnode()]
204 checkzobject(mctx)
208 checkzobject(mctx)
205
209
206 # Conforms to imanifestrevisionwritable.
210 # Conforms to imanifestrevisionwritable.
207 checkzobject(mctx.new())
211 checkzobject(mctx.new())
208 checkzobject(mctx.copy())
212 checkzobject(mctx.copy())
209
213
210 # Conforms to imanifestdict.
214 # Conforms to imanifestdict.
211 checkzobject(mctx.read())
215 checkzobject(mctx.read())
212
216
213 mrl = manifest.manifestrevlog(vfs)
217 mrl = manifest.manifestrevlog(vfs)
214 checkzobject(mrl)
218 checkzobject(mrl)
215
219
216 ziverify.verifyClass(repository.irevisiondelta,
220 ziverify.verifyClass(repository.irevisiondelta,
217 revlog.revlogrevisiondelta)
221 revlog.revlogrevisiondelta)
218
222
219 rd = revlog.revlogrevisiondelta(
223 rd = revlog.revlogrevisiondelta(
220 node=b'',
224 node=b'',
221 p1node=b'',
225 p1node=b'',
222 p2node=b'',
226 p2node=b'',
223 basenode=b'',
227 basenode=b'',
224 linknode=b'',
228 linknode=b'',
225 flags=b'',
229 flags=b'',
226 baserevisionsize=None,
230 baserevisionsize=None,
227 revision=b'',
231 revision=b'',
228 delta=None)
232 delta=None)
229 checkzobject(rd)
233 checkzobject(rd)
230
234
231 ziverify.verifyClass(repository.iverifyproblem,
235 ziverify.verifyClass(repository.iverifyproblem,
232 revlog.revlogproblem)
236 revlog.revlogproblem)
233 checkzobject(revlog.revlogproblem())
237 checkzobject(revlog.revlogproblem())
234
238
235 main()
239 main()
General Comments 0
You need to be logged in to leave comments. Login now